Compare commits
8 Commits
remove-exa
...
sense-disa
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8406a1e381 | ||
|
|
438e18c78f | ||
|
|
b0cf72f0ec | ||
|
|
fd2aa96e65 | ||
|
|
7239a45ac5 | ||
|
|
7f84cc22e9 | ||
|
|
0db213f993 | ||
|
|
bf5abe2948 |
2
.ghci
@@ -1,2 +1,2 @@
|
||||
:set -isrc/compiler -isrc/binary -isrc/runtime/haskell -isrc/server -isrc/server/transfer -idist/build/autogen -idist/build
|
||||
:set -isrc/compiler -isrc/binary -isrc/runtime/haskell -isrc/server -isrc/example-based -isrc/server/transfer -idist/build/autogen -idist/build
|
||||
:set -fwarn-unused-imports -optP-DSERVER_MODE -optP-DUSE_INTERRUPT -optP-DCC_LAZY -optP-include -optPdist/build/autogen/cabal_macros.h -odir dist/build/gf/gf-tmp -hidir dist/build/gf/gf-tmp -stubdir dist/build/gf/gf-tmp
|
||||
|
||||
3
.gitignore
vendored
@@ -39,7 +39,8 @@ src/runtime/c/sg/.dirstamp
|
||||
src/runtime/c/stamp-h1
|
||||
src/runtime/java/.libs/
|
||||
src/runtime/python/build/
|
||||
src/ui/android/libs/
|
||||
src/ui/android/obj/
|
||||
.cabal-sandbox
|
||||
cabal.sandbox.config
|
||||
.stack-work
|
||||
DATA_DIR
|
||||
|
||||
19
LICENSE
@@ -8,9 +8,24 @@ other. For this reason the different components have different licenses.
|
||||
In summary:
|
||||
|
||||
- the GF compiler in the folder src/compiler and the PGF Web service in src/server
|
||||
are under the GNU GENERAL PUBLIC LICENSE.
|
||||
are under the GNU GENERAL PUBLIC LICENSE.
|
||||
|
||||
- the GF runtime in src/runtime is under dual GNU LESSER GENERAL PUBLIC LICENSE and BSD LICENSE
|
||||
- the GF runtime in src/runtime is under dual GNU LESSER GENERAL PUBLIC LICENSE
|
||||
and BSD LICENSE
|
||||
|
||||
- the resource grammar library in lib/src is under GNU LESSER GENERAL PUBLIC LICENSE.
|
||||
However, the user has the right to choose any license for any application grammar
|
||||
derived from the resource grammar by using the grammar API.
|
||||
|
||||
- the resource grammar library also includes large coverage lexicons for some languages.
|
||||
Since these lexicons are derived from external sources they might be under different licenses.
|
||||
Look at the source file for every lexicon for details. The lexicons that we currently have
|
||||
are:
|
||||
|
||||
lib/src/bulgarian/ DictBul.gf DictBulAbs.gf for Bulgarian
|
||||
lib/src/english/ DictEng.gf DictEngAbs.gf for English
|
||||
lib/src/turkish/ DictTur.gf DictTurAbs.gf for Turkish
|
||||
lib/src/swedish/ DictSwe.gf DictSweAbs.gf for Swedish
|
||||
|
||||
The rest of this document contains copies of the GPL, LGPL and BSD licenses
|
||||
which are applicable to the different components of Grammatical Framework
|
||||
|
||||
@@ -70,7 +70,7 @@ buildWeb gf flags (pkg,lbi) = do
|
||||
gf_lib_path = datadir (absoluteInstallDirs pkg lbi dest) </> "lib"
|
||||
args = numJobs flags++["-make","-s"] -- ,"-optimize-pgf"
|
||||
++["--gfo-dir="++tmp_dir,
|
||||
--"--gf-lib-path="++gf_lib_path,
|
||||
"--gf-lib-path="++gf_lib_path,
|
||||
"--name="++dropExtension pgf,
|
||||
"--output-dir="++gfo_dir]
|
||||
++[dir</>file|file<-src]
|
||||
|
||||
@@ -67,27 +67,10 @@ fi
|
||||
cabal install --only-dependencies -fserver -fc-runtime $extra
|
||||
cabal configure --prefix="$prefix" -fserver -fc-runtime $extra
|
||||
DYLD_LIBRARY_PATH="$extralib" LD_LIBRARY_PATH="$extralib" cabal build
|
||||
# Building the example grammars will fail, because the RGL is missing
|
||||
cabal copy --destdir="$destdir" # create www directory
|
||||
|
||||
## Build the RGL and copy it to $destdir
|
||||
PATH=$PWD/dist/build/gf:$PATH
|
||||
export GF_LIB_PATH="$(dirname $(find "$destdir" -name www))/lib" # hmm
|
||||
mkdir -p "$GF_LIB_PATH"
|
||||
pushd ../gf-rgl
|
||||
make build
|
||||
make copy
|
||||
popd
|
||||
|
||||
# Build GF again, including example grammars that need the RGL
|
||||
DYLD_LIBRARY_PATH="$extralib" LD_LIBRARY_PATH="$extralib" cabal build
|
||||
|
||||
## Copy GF to $destdir
|
||||
cabal copy --destdir="$destdir"
|
||||
libdir=$(dirname $(find "$destdir" -name PGF.hi))
|
||||
cabal register --gen-pkg-config=$libdir/gf-$ver.conf
|
||||
|
||||
## Create the binary distribution package
|
||||
case $fmt in
|
||||
tar.gz)
|
||||
targz="$name-bin-$hw-$os.tar.gz" # the final tar file
|
||||
|
||||
@@ -17,7 +17,10 @@ h1 img.nofloat { float: none; }
|
||||
img.right { float: right; }
|
||||
|
||||
ol.languages {
|
||||
column-width: 12em;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex-wrap: wrap;
|
||||
height: 12em;
|
||||
}
|
||||
|
||||
.grow {
|
||||
|
||||
@@ -3120,44 +3120,23 @@ a part of the GF grammar compiler.
|
||||
<TR>
|
||||
<TD><CODE>nonExist</CODE></TD>
|
||||
<TD><CODE>Str</CODE></TD>
|
||||
<TD>a special token marking<BR/>
|
||||
<TD>this is a special token marking<BR/>
|
||||
non-existing morphological forms</TD>
|
||||
</TR>
|
||||
<TR>
|
||||
<TD><CODE>BIND</CODE></TD>
|
||||
<TD><CODE>Str</CODE></TD>
|
||||
<TD>a special token marking<BR/>
|
||||
<TD>this is a special token marking<BR/>
|
||||
that the surrounding tokens should not<BR/>
|
||||
be separated by space</TD>
|
||||
</TR>
|
||||
<TR>
|
||||
<TD><CODE>SOFT_BIND</CODE></TD>
|
||||
<TD><CODE>Str</CODE></TD>
|
||||
<TD>a special token marking<BR/>
|
||||
<TD>this is a special token marking<BR/>
|
||||
that the surrounding tokens may not<BR/>
|
||||
be separated by space</TD>
|
||||
</TR>
|
||||
<TR>
|
||||
<TD><CODE>SOFT_SPACE</CODE></TD>
|
||||
<TD><CODE>Str</CODE></TD>
|
||||
<TD>a special token marking<BR/>
|
||||
that the space between the surrounding tokens<BR/>
|
||||
is optional</TD>
|
||||
</TR>
|
||||
<TR>
|
||||
<TD><CODE>CAPIT</CODE></TD>
|
||||
<TD><CODE>Str</CODE></TD>
|
||||
<TD>a special token marking<BR/>
|
||||
that the first character in the next token<BR/>
|
||||
should be capitalized</TD>
|
||||
</TR>
|
||||
<TR>
|
||||
<TD><CODE>ALL_CAPIT</CODE></TD>
|
||||
<TD><CODE>Str</CODE></TD>
|
||||
<TD>a special token marking<BR/>
|
||||
that the next word should be<BR/>
|
||||
in all capital letters</TD>
|
||||
</TR>
|
||||
</TABLE>
|
||||
|
||||
<P></P>
|
||||
|
||||
@@ -176,11 +176,6 @@ The above notes for installing from source apply also in these cases.
|
||||
For more info on working with the GF source code, see the
|
||||
[GF Developers Guide ../doc/gf-developers.html].
|
||||
|
||||
==Using Stack==
|
||||
|
||||
You can also use [Stack https://www.haskellstack.org] to compile GF, just replace ``cabal install`` above
|
||||
with ``stack install`` (assuming you already have Stack set up).
|
||||
|
||||
==Older releases==
|
||||
|
||||
- [GF 3.8 index-3.8.html] (June 2016)
|
||||
|
||||
32
gf.cabal
@@ -1,5 +1,5 @@
|
||||
name: gf
|
||||
version: 3.10
|
||||
version: 3.9-git
|
||||
|
||||
cabal-version: >= 1.22
|
||||
build-type: Custom
|
||||
@@ -42,7 +42,7 @@ data-files:
|
||||
custom-setup
|
||||
setup-depends:
|
||||
base,
|
||||
Cabal >=1.22.0.0,
|
||||
Cabal >=1.4.0.0,
|
||||
directory,
|
||||
filepath,
|
||||
process >=1.0.1.1
|
||||
@@ -67,6 +67,11 @@ flag network-uri
|
||||
-- Description: Make -new-comp the default
|
||||
-- Default: True
|
||||
|
||||
flag custom-binary
|
||||
Description: Use a customised version of the binary package
|
||||
Default: True
|
||||
Manual: True
|
||||
|
||||
flag c-runtime
|
||||
Description: Include functionality from the C run-time library (which must be installed already)
|
||||
Default: False
|
||||
@@ -84,14 +89,17 @@ Library
|
||||
exceptions
|
||||
hs-source-dirs: src/runtime/haskell
|
||||
|
||||
other-modules:
|
||||
-- not really part of GF but I have changed the original binary library
|
||||
-- and we have to keep the copy for now.
|
||||
Data.Binary
|
||||
Data.Binary.Put
|
||||
Data.Binary.Get
|
||||
Data.Binary.Builder
|
||||
Data.Binary.IEEE754
|
||||
if flag(custom-binary)
|
||||
other-modules:
|
||||
-- not really part of GF but I have changed the original binary library
|
||||
-- and we have to keep the copy for now.
|
||||
Data.Binary
|
||||
Data.Binary.Put
|
||||
Data.Binary.Get
|
||||
Data.Binary.Builder
|
||||
Data.Binary.IEEE754
|
||||
else
|
||||
build-depends: binary, data-binary-ieee754
|
||||
|
||||
--ghc-options: -fwarn-unused-imports
|
||||
--if impl(ghc>=7.8)
|
||||
@@ -287,7 +295,9 @@ Library
|
||||
CGIUtils
|
||||
Cache
|
||||
Fold
|
||||
hs-source-dirs: src/server src/server/transfer
|
||||
ExampleDemo
|
||||
ExampleService
|
||||
hs-source-dirs: src/server src/server/transfer src/example-based
|
||||
|
||||
if flag(interrupt)
|
||||
cpp-options: -DUSE_INTERRUPT
|
||||
|
||||
@@ -80,7 +80,7 @@ function sitesearch() {
|
||||
<ul>
|
||||
<li><a href="http://hackage.haskell.org/package/gf-3.9/docs/PGF.html">PGF library API (Old Runtime)</a>
|
||||
<li><a href="doc/runtime-api.html">PGF library API (New Runtime)</a>
|
||||
<li><a href="https://github.com/GrammaticalFramework/gf-offline-translator/tree/master/android">GF on Android (new)</a>
|
||||
<li><a href="src/ui/android/README">GF on Android (new)</a>
|
||||
<li><A HREF="/android/">GF on Android (old) </A>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
@@ -723,7 +723,7 @@ pgfCommands = Map.fromList [
|
||||
case toExprs arg of
|
||||
[EFun id] -> case Map.lookup id (funs (abstract pgf)) of
|
||||
Just fd -> do putStrLn $ render (ppFun id fd)
|
||||
let (_,_,_,prob) = fd
|
||||
let (_,_,_,_,prob) = fd
|
||||
putStrLn ("Probability: "++show prob)
|
||||
return void
|
||||
Nothing -> case Map.lookup id (cats (abstract pgf)) of
|
||||
@@ -732,7 +732,7 @@ pgfCommands = Map.fromList [
|
||||
if null (functionsToCat pgf id)
|
||||
then empty
|
||||
else ' ' $$
|
||||
vcat [ppFun fid (ty,0,Just ([],[]),0) | (fid,ty) <- functionsToCat pgf id] $$
|
||||
vcat [ppFun fid (ty,[],0,Just ([],[]),0) | (fid,ty) <- functionsToCat pgf id] $$
|
||||
' ')
|
||||
let (_,_,prob) = cd
|
||||
putStrLn ("Probability: "++show prob)
|
||||
@@ -909,7 +909,7 @@ pgfCommands = Map.fromList [
|
||||
| otherwise = do fmt <- readOutputFormat (valStrOpts "printer" "pgf_pretty" opts)
|
||||
return $ fromString $ concatMap snd $ exportPGF noOptions fmt pgf
|
||||
|
||||
funsigs pgf = [(f,ty) | (f,(ty,_,_,_)) <- Map.assocs (funs (abstract pgf))]
|
||||
funsigs pgf = [(f,ty) | (f,(ty,_,_,_,_)) <- Map.assocs (funs (abstract pgf))]
|
||||
showFun (f,ty) = showCId f ++ " : " ++ showType [] ty ++ " ;"
|
||||
|
||||
morphos (Env pgf mos) opts s =
|
||||
|
||||
@@ -35,7 +35,7 @@ cf2abstr cfg = Abstr aflags afuns acats
|
||||
| (cat,rules) <- (Map.toList . Map.fromListWith (++))
|
||||
[(cat2id cat, catRules cfg cat) |
|
||||
cat <- allCats' cfg]]
|
||||
afuns = Map.fromList [(mkRuleName rule, (cftype [cat2id c | NonTerminal c <- ruleRhs rule] (cat2id (ruleLhs rule)), 0, Nothing, 0))
|
||||
afuns = Map.fromList [(mkRuleName rule, (cftype [cat2id c | NonTerminal c <- ruleRhs rule] (cat2id (ruleLhs rule)), [], 0, Nothing, 0))
|
||||
| rule <- allRules cfg]
|
||||
|
||||
cat2id = mkCId . fst
|
||||
@@ -56,7 +56,7 @@ cf2concr cfg = Concr Map.empty Map.empty
|
||||
map mkSequence rules)
|
||||
sequences = listArray (0,Set.size sequences0-1) (Set.toList sequences0)
|
||||
|
||||
idFun = CncFun wildCId (listArray (0,0) [seqid])
|
||||
idFun = CncFun [wildCId] (listArray (0,0) [seqid])
|
||||
where
|
||||
seq = listArray (0,0) [SymCat 0 0]
|
||||
seqid = binSearch seq sequences (bounds sequences)
|
||||
@@ -77,7 +77,7 @@ cf2concr cfg = Concr Map.empty Map.empty
|
||||
let args = [PArg [] (cat2arg c) | NonTerminal c <- ruleRhs rule]
|
||||
prod = PApply funid args
|
||||
seqid = binSearch (mkSequence rule) sequences (bounds sequences)
|
||||
fun = CncFun (mkRuleName rule) (listArray (0,0) [seqid])
|
||||
fun = CncFun [mkRuleName rule] (listArray (0,0) [seqid])
|
||||
funid' = funid+1
|
||||
in funid' `seq` ((funid',fun:funs),let (c,ps) = ruleLhs rule in [(cat2fid c p, prod) | p <- ps])
|
||||
|
||||
|
||||
@@ -6,18 +6,15 @@ import GF.Compile.GeneratePMCFG
|
||||
import GF.Compile.GenerateBC
|
||||
|
||||
import PGF(CId,mkCId,utf8CId)
|
||||
import PGF.Internal(fidInt,fidFloat,fidString,fidVar)
|
||||
import PGF.Internal(fidInt,fidFloat,fidString,fidVar,DepPragma(..))
|
||||
import PGF.Internal(updateProductionIndices)
|
||||
--import qualified PGF.Macros as CM
|
||||
import qualified PGF.Internal as C
|
||||
import qualified PGF.Internal as D
|
||||
import GF.Grammar.Predef
|
||||
--import GF.Grammar.Printer
|
||||
import GF.Grammar.Grammar
|
||||
import qualified GF.Grammar.Lookup as Look
|
||||
import qualified GF.Grammar as A
|
||||
import qualified GF.Grammar.Macros as GM
|
||||
--import GF.Compile.GeneratePMCFG
|
||||
|
||||
import GF.Infra.Ident
|
||||
import GF.Infra.Option
|
||||
@@ -25,20 +22,24 @@ import GF.Infra.UseIO (IOE)
|
||||
import GF.Data.Operations
|
||||
|
||||
import Data.List
|
||||
import Data.Maybe (fromMaybe)
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.IntMap as IntMap
|
||||
import Data.Array.IArray
|
||||
|
||||
mkCanon2pgf :: Options -> SourceGrammar -> ModuleName -> IOE D.PGF
|
||||
mkCanon2pgf :: Options -> SourceGrammar -> ModuleName -> IOE C.PGF
|
||||
mkCanon2pgf opts gr am = do
|
||||
(an,abs) <- mkAbstr am
|
||||
depconf <- case flag optLabelsFile opts of
|
||||
Nothing -> return Map.empty
|
||||
Just fpath -> readDepConfig fpath
|
||||
(an,abs) <- mkAbstr am depconf
|
||||
cncs <- mapM mkConcr (allConcretes gr am)
|
||||
return $ updateProductionIndices (D.PGF Map.empty an abs (Map.fromList cncs))
|
||||
return $ updateProductionIndices (C.PGF Map.empty an abs (Map.fromList cncs))
|
||||
where
|
||||
cenv = resourceValues opts gr
|
||||
|
||||
mkAbstr am = return (mi2i am, D.Abstr flags funs cats)
|
||||
mkAbstr am depconf = return (mi2i am, C.Abstr flags funs cats)
|
||||
where
|
||||
aflags = err (const noOptions) mflags (lookupModule gr am)
|
||||
|
||||
@@ -48,7 +49,7 @@ mkCanon2pgf opts gr am = do
|
||||
|
||||
flags = Map.fromList [(mkCId f,x) | (f,x) <- optionsPGF aflags]
|
||||
|
||||
funs = Map.fromList [(i2i f, (mkType [] ty, arity, mkDef gr arity mdef, 0)) |
|
||||
funs = Map.fromList [(i2i f, (mkType [] ty, fromMaybe [] (Map.lookup (i2i f) depconf), arity, mkDef gr arity mdef, 0)) |
|
||||
((m,f),AbsFun (Just (L _ ty)) ma mdef _) <- adefs,
|
||||
let arity = mkArity ma mdef ty]
|
||||
|
||||
@@ -78,7 +79,7 @@ mkCanon2pgf opts gr am = do
|
||||
= genCncFuns gr am cm ex_seqs_arr seqs cdefs fid_cnt1 cnccats
|
||||
|
||||
printnames = genPrintNames cdefs
|
||||
return (mi2i cm, D.Concr flags
|
||||
return (mi2i cm, C.Concr flags
|
||||
printnames
|
||||
cncfuns
|
||||
lindefs
|
||||
@@ -189,54 +190,80 @@ genCncFuns :: Grammar
|
||||
-> Array SeqId Sequence
|
||||
-> [(QIdent, Info)]
|
||||
-> FId
|
||||
-> Map.Map CId D.CncCat
|
||||
-> Map.Map CId C.CncCat
|
||||
-> (FId,
|
||||
IntMap.IntMap (Set.Set D.Production),
|
||||
IntMap.IntMap (Set.Set C.Production),
|
||||
IntMap.IntMap [FunId],
|
||||
IntMap.IntMap [FunId],
|
||||
Array FunId D.CncFun)
|
||||
Array FunId C.CncFun)
|
||||
genCncFuns gr am cm ex_seqs seqs cdefs fid_cnt cnccats =
|
||||
let (fid_cnt1,funs_cnt1,funs1,lindefs,linrefs) = mkCncCats cdefs fid_cnt 0 [] IntMap.empty IntMap.empty
|
||||
(fid_cnt2,funs_cnt2,funs2,prods) = mkCncFuns cdefs fid_cnt1 funs_cnt1 funs1 lindefs Map.empty IntMap.empty
|
||||
in (fid_cnt2,prods,lindefs,linrefs,array (0,funs_cnt2-1) funs2)
|
||||
let (fid_cnt1,lindefs,linrefs,fun_st1) = mkCncCats cdefs fid_cnt IntMap.empty IntMap.empty Map.empty
|
||||
((fid_cnt2,crc,prods),fun_st2) = mkCncFuns cdefs lindefs ((fid_cnt1,Map.empty,IntMap.empty),fun_st1)
|
||||
in (fid_cnt2,prods,lindefs,linrefs,array (0,Map.size fun_st2-1) (Map.elems fun_st2))
|
||||
where
|
||||
mkCncCats [] fid_cnt funs_cnt funs lindefs linrefs =
|
||||
(fid_cnt,funs_cnt,funs,lindefs,linrefs)
|
||||
mkCncCats (((m,id),CncCat _ _ _ _ (Just (PMCFG prods0 funs0))):cdefs) fid_cnt funs_cnt funs lindefs linrefs =
|
||||
let !funs_cnt' = let (s_funid, e_funid) = bounds funs0
|
||||
in funs_cnt+(e_funid-s_funid+1)
|
||||
lindefs' = foldl' (toLinDef (am,id) funs_cnt) lindefs prods0
|
||||
linrefs' = foldl' (toLinRef (am,id) funs_cnt) linrefs prods0
|
||||
funs' = foldl' (toCncFun funs_cnt (m,mkLinDefId id)) funs (assocs funs0)
|
||||
in mkCncCats cdefs fid_cnt funs_cnt' funs' lindefs' linrefs'
|
||||
mkCncCats (_ :cdefs) fid_cnt funs_cnt funs lindefs linrefs =
|
||||
mkCncCats cdefs fid_cnt funs_cnt funs lindefs linrefs
|
||||
mkCncCats [] fid_cnt lindefs linrefs fun_st =
|
||||
(fid_cnt,lindefs,linrefs,fun_st)
|
||||
mkCncCats (((m,id),CncCat _ _ _ _ (Just (PMCFG prods0 funs0))):cdefs) fid_cnt lindefs linrefs fun_st =
|
||||
let mseqs = case lookupModule gr m of
|
||||
Ok (ModInfo{mseqs=Just mseqs}) -> mseqs
|
||||
_ -> ex_seqs
|
||||
(lindefs',fun_st1) = foldl' (toLinDef (m,id) funs0 mseqs) (lindefs,fun_st ) prods0
|
||||
(linrefs',fun_st2) = foldl' (toLinRef (m,id) funs0 mseqs) (linrefs,fun_st1) prods0
|
||||
in mkCncCats cdefs fid_cnt lindefs' linrefs' fun_st2
|
||||
mkCncCats (_ :cdefs) fid_cnt lindefs linrefs fun_st =
|
||||
mkCncCats cdefs fid_cnt lindefs linrefs fun_st
|
||||
|
||||
mkCncFuns [] fid_cnt funs_cnt funs lindefs crc prods =
|
||||
(fid_cnt,funs_cnt,funs,prods)
|
||||
mkCncFuns (((m,id),CncFun _ _ _ (Just (PMCFG prods0 funs0))):cdefs) fid_cnt funs_cnt funs lindefs crc prods =
|
||||
let ---Ok ty_C = fmap GM.typeForm (Look.lookupFunType gr am id)
|
||||
ty_C = err error (\x -> x) $ fmap GM.typeForm (Look.lookupFunType gr am id)
|
||||
!funs_cnt' = let (s_funid, e_funid) = bounds funs0
|
||||
in funs_cnt+(e_funid-s_funid+1)
|
||||
!(fid_cnt',crc',prods')
|
||||
= foldl' (toProd lindefs ty_C funs_cnt)
|
||||
(fid_cnt,crc,prods) prods0
|
||||
funs' = foldl' (toCncFun funs_cnt (m,id)) funs (assocs funs0)
|
||||
in mkCncFuns cdefs fid_cnt' funs_cnt' funs' lindefs crc' prods'
|
||||
mkCncFuns (_ :cdefs) fid_cnt funs_cnt funs lindefs crc prods =
|
||||
mkCncFuns cdefs fid_cnt funs_cnt funs lindefs crc prods
|
||||
mkCncFuns [] lindefs st = st
|
||||
mkCncFuns (((m,id),CncFun _ _ _ (Just (PMCFG prods0 funs0))):cdefs) lindefs st =
|
||||
let ty_C = err error (\x -> x) $ fmap GM.typeForm (Look.lookupFunType gr am id)
|
||||
mseqs = case lookupModule gr m of
|
||||
Ok (ModInfo{mseqs=Just mseqs}) -> mseqs
|
||||
_ -> ex_seqs
|
||||
bundles = [([(args0,res0) | Production res0 funid0 args0 <- prods0, funid0==funid],lins) | (funid,lins) <- assocs funs0]
|
||||
!st' = foldl' (toProd id lindefs mseqs ty_C) st bundles
|
||||
in mkCncFuns cdefs lindefs st'
|
||||
mkCncFuns (_ :cdefs) lindefs st =
|
||||
mkCncFuns cdefs lindefs st
|
||||
|
||||
toProd lindefs (ctxt_C,res_C,_) offs st (Production fid0 funid0 args0) =
|
||||
let !((fid_cnt,crc,prods),args) = mapAccumL mkArg st (zip ctxt_C args0)
|
||||
set0 = Set.fromList (map (C.PApply (offs+funid0)) (sequence args))
|
||||
fid = mkFId res_C fid0
|
||||
!prods' = case IntMap.lookup fid prods of
|
||||
Just set -> IntMap.insert fid (Set.union set0 set) prods
|
||||
Nothing -> IntMap.insert fid set0 prods
|
||||
in (fid_cnt,crc,prods')
|
||||
toLinDef mid funs0 mseqs st@(lindefs,fun_st) (Production res0 funid0 [arg0])
|
||||
| arg0 == [fidVar] =
|
||||
let res = mkFId mid res0
|
||||
|
||||
lins = amap (newSeqId mseqs) (funs0 ! funid0)
|
||||
|
||||
!funid = Map.size fun_st
|
||||
!fun_st' = Map.insert ([([C.PArg [] fidVar],res)],lins) (funid, C.CncFun [] lins) fun_st
|
||||
|
||||
!lindefs' = IntMap.insertWith (++) res [funid] lindefs
|
||||
in (lindefs',fun_st')
|
||||
toLinDef res funs0 mseqs st _ = st
|
||||
|
||||
toLinRef mid funs0 mseqs st (Production res0 funid0 [arg0])
|
||||
| res0 == fidVar =
|
||||
let arg = map (mkFId mid) arg0
|
||||
|
||||
lins = amap (newSeqId mseqs) (funs0 ! funid0)
|
||||
|
||||
in foldr (\arg (linrefs,fun_st) ->
|
||||
let !funid = Map.size fun_st
|
||||
!fun_st' = Map.insert ([([C.PArg [] arg],fidVar)],lins) (funid, C.CncFun [] lins) fun_st
|
||||
|
||||
!linrefs' = IntMap.insertWith (++) arg [funid] linrefs
|
||||
in (linrefs',fun_st'))
|
||||
st arg
|
||||
toLinRef res funs0 mseqs st _ = st
|
||||
|
||||
toProd id lindefs mseqs (ctxt_C,res_C,_) (prod_st,fun_st) (sigs0,lins0) =
|
||||
let (prod_st',sigs) = mapAccumL mkCncSig prod_st sigs0
|
||||
lins = amap (newSeqId mseqs) lins0
|
||||
in addBundle id (prod_st',fun_st) (concat sigs,lins)
|
||||
where
|
||||
mkArg st@(fid_cnt,crc,prods) ((_,_,ty),fid0s ) =
|
||||
mkCncSig prod_st (args0,res0) =
|
||||
let !(prod_st',args) = mapAccumL mkArg prod_st (zip ctxt_C args0)
|
||||
res = mkFId res_C res0
|
||||
in (prod_st',[(args,res) | args <- sequence args])
|
||||
|
||||
mkArg st@(fid_cnt,crc,prods) ((_,_,ty),fid0s) =
|
||||
case fid0s of
|
||||
[fid0] -> (st,map (flip C.PArg (mkFId arg_C fid0)) ctxt)
|
||||
fid0s -> case Map.lookup fids crc of
|
||||
@@ -246,43 +273,16 @@ genCncFuns gr am cm ex_seqs seqs cdefs fid_cnt cnccats =
|
||||
in ((fid_cnt+1,crc',prods'),map (flip C.PArg fid_cnt) ctxt)
|
||||
where
|
||||
(hargs_C,arg_C) = GM.catSkeleton ty
|
||||
ctxt = mapM (mkCtxt lindefs) hargs_C
|
||||
ctxt = mapM mkCtxt hargs_C
|
||||
fids = map (mkFId arg_C) fid0s
|
||||
|
||||
mkLinDefId id = prefixIdent "lindef " id
|
||||
mkCtxt (_,cat) =
|
||||
case Map.lookup (i2i cat) cnccats of
|
||||
Just (C.CncCat s e _) -> [(C.fidVar,fid) | fid <- [s..e], Just _ <- [IntMap.lookup fid lindefs]]
|
||||
Nothing -> error "GrammarToPGF.mkCtxt failed"
|
||||
|
||||
toLinDef res offs lindefs (Production fid0 funid0 args) =
|
||||
if args == [[fidVar]]
|
||||
then IntMap.insertWith (++) fid [offs+funid0] lindefs
|
||||
else lindefs
|
||||
newSeqId mseqs i = binSearch (mseqs ! i) seqs (bounds seqs)
|
||||
where
|
||||
fid = mkFId res fid0
|
||||
|
||||
toLinRef res offs linrefs (Production fid0 funid0 [fargs]) =
|
||||
if fid0 == fidVar
|
||||
then foldr (\fid -> IntMap.insertWith (++) fid [offs+funid0]) linrefs fids
|
||||
else linrefs
|
||||
where
|
||||
fids = map (mkFId res) fargs
|
||||
|
||||
mkFId (_,cat) fid0 =
|
||||
case Map.lookup (i2i cat) cnccats of
|
||||
Just (C.CncCat s e _) -> s+fid0
|
||||
Nothing -> error ("GrammarToPGF.mkFId: missing category "++showIdent cat)
|
||||
|
||||
mkCtxt lindefs (_,cat) =
|
||||
case Map.lookup (i2i cat) cnccats of
|
||||
Just (C.CncCat s e _) -> [(C.fidVar,fid) | fid <- [s..e], Just _ <- [IntMap.lookup fid lindefs]]
|
||||
Nothing -> error "GrammarToPGF.mkCtxt failed"
|
||||
|
||||
toCncFun offs (m,id) funs (funid0,lins0) =
|
||||
let mseqs = case lookupModule gr m of
|
||||
Ok (ModInfo{mseqs=Just mseqs}) -> mseqs
|
||||
_ -> ex_seqs
|
||||
in (offs+funid0,C.CncFun (i2i id) (amap (newIndex mseqs) lins0)):funs
|
||||
where
|
||||
newIndex mseqs i = binSearch (mseqs ! i) seqs (bounds seqs)
|
||||
|
||||
binSearch v arr (i,j)
|
||||
| i <= j = case compare v (arr ! k) of
|
||||
LT -> binSearch v arr (i,k-1)
|
||||
@@ -292,6 +292,24 @@ genCncFuns gr am cm ex_seqs seqs cdefs fid_cnt cnccats =
|
||||
where
|
||||
k = (i+j) `div` 2
|
||||
|
||||
addBundle id ((fid_cnt,crc,prods),fun_st) bundle@(sigs,lins) =
|
||||
case Map.lookup bundle fun_st of
|
||||
Just (funid, C.CncFun funs lins) ->
|
||||
let !fun_st' = Map.insert bundle (funid, C.CncFun (i2i id:funs) lins) fun_st
|
||||
!prods' = foldl' (\prods (args,res) -> IntMap.insert res (Set.singleton (C.PApply funid args)) prods) prods sigs
|
||||
in ((fid_cnt,crc,prods'),fun_st')
|
||||
Nothing ->
|
||||
let !funid = Map.size fun_st
|
||||
!fun_st' = Map.insert bundle (funid, C.CncFun [i2i id] lins) fun_st
|
||||
!prods' = foldl' (\prods (args,res) -> IntMap.insert res (Set.singleton (C.PApply funid args)) prods) prods sigs
|
||||
in ((fid_cnt,crc,prods'),fun_st')
|
||||
|
||||
mkFId (_,cat) fid0 =
|
||||
case Map.lookup (i2i cat) cnccats of
|
||||
Just (C.CncCat s e _) -> s+fid0
|
||||
Nothing -> error ("GrammarToPGF.mkFId: missing category "++showIdent cat)
|
||||
|
||||
|
||||
genPrintNames cdefs =
|
||||
Map.fromAscList [(i2i id, name) | ((m,id),info) <- cdefs, name <- prn info]
|
||||
where
|
||||
@@ -306,3 +324,29 @@ genPrintNames cdefs =
|
||||
--mkArray lst = listArray (0,length lst-1) lst
|
||||
mkMapArray map = array (0,Map.size map-1) [(v,k) | (k,v) <- Map.toList map]
|
||||
mkSetArray set = listArray (0,Set.size set-1) [v | v <- Set.toList set]
|
||||
|
||||
|
||||
|
||||
readDepConfig :: FilePath -> IO (Map.Map CId [DepPragma])
|
||||
readDepConfig fpath =
|
||||
fmap (Map.fromList . concatMap toEntry . lines) $ readFile fpath
|
||||
where
|
||||
toEntry l =
|
||||
case words l of
|
||||
[] -> []
|
||||
("--":_) -> []
|
||||
(fun:ws) -> [(mkCId fun,[toPragma w | w <- ws])]
|
||||
|
||||
toPragma "head" = Head 0 ""
|
||||
toPragma ('h':'e':'a':'d':':':cs) =
|
||||
case break (==':') cs of
|
||||
(lbl,[] ) -> Head 0 lbl
|
||||
(lbl,':':cs) -> Head (read cs) lbl
|
||||
toPragma "rel" = Rel 0
|
||||
toPragma ('r':'e':'l':':':cs) = Rel (read cs)
|
||||
toPragma "_" = Skip
|
||||
toPragma "anchor" = Anch
|
||||
toPragma s =
|
||||
case break (==':') s of
|
||||
(lbl,[] ) -> Mod 0 lbl
|
||||
(lbl,':':cs) -> Mod (read cs) lbl
|
||||
|
||||
@@ -273,7 +273,7 @@ hSkeleton gr =
|
||||
fns = groupBy valtypg (sortBy valtyps (map jty (Map.assocs (funs (abstract gr)))))
|
||||
valtyps (_, (_,x)) (_, (_,y)) = compare x y
|
||||
valtypg (_, (_,x)) (_, (_,y)) = x == y
|
||||
jty (f,(ty,_,_,_)) = (f,catSkeleton ty)
|
||||
jty (f,(ty,_,_,_,_)) = (f,catSkeleton ty)
|
||||
{-
|
||||
updateSkeleton :: OIdent -> HSkeleton -> (OIdent, [OIdent]) -> HSkeleton
|
||||
updateSkeleton cat skel rule =
|
||||
|
||||
@@ -32,8 +32,8 @@ pgf2js pgf =
|
||||
abstract2js :: String -> Abstr -> JS.Expr
|
||||
abstract2js start ds = new "GFAbstract" [JS.EStr start, JS.EObj $ map absdef2js (Map.assocs (funs ds))]
|
||||
|
||||
absdef2js :: (CId,(Type,Int,Maybe ([Equation],[[M.Instr]]),Double)) -> JS.Property
|
||||
absdef2js (f,(typ,_,_,_)) =
|
||||
absdef2js :: (CId,(Type,[DepPragma],Int,Maybe ([Equation],[[M.Instr]]),Double)) -> JS.Property
|
||||
absdef2js (f,(typ,_,_,_,_)) =
|
||||
let (args,cat) = M.catSkeleton typ in
|
||||
JS.Prop (JS.IdentPropName (JS.Ident (showCId f))) (new "Type" [JS.EArray [JS.EStr (showCId x) | x <- args], JS.EStr (showCId cat)])
|
||||
|
||||
@@ -78,7 +78,7 @@ frule2js (PCoerce arg) = new "Coerce" [JS.EInt arg]
|
||||
|
||||
farg2js (PArg hypos fid) = new "PArg" (map (JS.EInt . snd) hypos ++ [JS.EInt fid])
|
||||
|
||||
ffun2js (CncFun f lins) = new "CncFun" [JS.EStr (showCId f), JS.EArray (map JS.EInt (Array.elems lins))]
|
||||
ffun2js (CncFun fns lins) = new "CncFun" [JS.EArray (map (JS.EStr . showCId) fns), JS.EArray (map JS.EInt (Array.elems lins))]
|
||||
|
||||
seq2js :: Array.Array DotPos Symbol -> JS.Expr
|
||||
seq2js seq = JS.EArray [sym2js s | s <- Array.elems seq]
|
||||
|
||||
@@ -54,11 +54,11 @@ plAbstract name abs
|
||||
let args = reverse [EFun x | (_,x) <- subst]] ++++
|
||||
plFacts name "fun" 3 "(?Fun, ?Type, ?[X:Type,...])"
|
||||
[[plp fun, plType cat args, plHypos hypos] |
|
||||
(fun, (typ, _, _, _)) <- Map.assocs (funs abs),
|
||||
(fun, (typ, _, _, _, _)) <- Map.assocs (funs abs),
|
||||
let (_, DTyp hypos cat args) = alphaConvert emptyEnv typ] ++++
|
||||
plFacts name "def" 2 "(?Fun, ?Expr)"
|
||||
[[plp fun, plp expr] |
|
||||
(fun, (_, _, Just (eqs,_), _)) <- Map.assocs (funs abs),
|
||||
(fun, (_, _, _, Just (eqs,_), _)) <- Map.assocs (funs abs),
|
||||
let (_, expr) = alphaConvert emptyEnv eqs]
|
||||
)
|
||||
where plType cat args = plTerm (plp cat) (map plp args)
|
||||
|
||||
@@ -40,8 +40,8 @@ pgf2python pgf = ("# -*- coding: utf-8 -*-" ++++
|
||||
abs = abstract pgf
|
||||
cncs = concretes pgf
|
||||
|
||||
pyAbsdef :: (Type, Int, Maybe ([Equation], [[M.Instr]]), Double) -> String
|
||||
pyAbsdef (typ, _, _, _) = pyTuple 0 id [pyCId cat, pyList 0 pyCId args]
|
||||
pyAbsdef :: (Type, [DepPragma], Int, Maybe ([Equation], [[M.Instr]]), Double) -> String
|
||||
pyAbsdef (typ, _, _, _, _) = pyTuple 0 id [pyCId cat, pyList 0 pyCId args]
|
||||
where (args, cat) = M.catSkeleton typ
|
||||
|
||||
pyLiteral :: Literal -> String
|
||||
@@ -62,7 +62,7 @@ pyConcrete cnc = pyDict 3 pyStr id [
|
||||
]
|
||||
where pyProds prods = pyList 5 pyProduction (Set.toList prods)
|
||||
pyCncCat (CncCat start end _) = pyList 0 pyCat [start..end]
|
||||
pyCncFun (CncFun f lins) = pyTuple 0 id [pyList 0 pySeq (Array.elems lins), pyCId f]
|
||||
pyCncFun (CncFun fns lins) = pyTuple 0 id [pyList 0 pySeq (Array.elems lins), pyList 0 pyCId fns]
|
||||
pySymbols syms = pyList 0 pySymbol (Array.elems syms)
|
||||
|
||||
pyProduction :: Production -> String
|
||||
|
||||
@@ -157,6 +157,7 @@ data Flags = Flags {
|
||||
optDocumentRoot :: Maybe FilePath, -- For --server mode
|
||||
optRecomp :: Recomp,
|
||||
optProbsFile :: Maybe FilePath,
|
||||
optLabelsFile :: Maybe FilePath,
|
||||
optRetainResource :: Bool,
|
||||
optName :: Maybe String,
|
||||
optPreprocessors :: [String],
|
||||
@@ -268,6 +269,7 @@ defaultFlags = Flags {
|
||||
optDocumentRoot = Nothing,
|
||||
optRecomp = RecompIfNewer,
|
||||
optProbsFile = Nothing,
|
||||
optLabelsFile = Nothing,
|
||||
optRetainResource = False,
|
||||
|
||||
optName = Nothing,
|
||||
@@ -349,8 +351,9 @@ optDescr =
|
||||
Option [] ["gfo","no-recomp"] (NoArg (recomp NeverRecomp))
|
||||
"Never recompile from source, if there is already .gfo file.",
|
||||
Option [] ["retain"] (NoArg (set $ \o -> o { optRetainResource = True })) "Retain opers.",
|
||||
Option [] ["probs"] (ReqArg probsFile "file.probs") "Read probabilities from file.",
|
||||
Option ['n'] ["name"] (ReqArg name "NAME")
|
||||
Option [] ["probs"] (ReqArg probsFile "file.probs") "Read probabilities from a file.",
|
||||
Option [] ["depconf"] (ReqArg labelsFile "file.labels") "Read a configuration for generation of syntactic dependency graphs from a file.",
|
||||
Option ['n'] ["name"] (ReqArg name "NAME")
|
||||
(unlines ["Use NAME as the name of the output. This is used in the output file names, ",
|
||||
"with suffixes depending on the formats, and, when relevant, ",
|
||||
"internally in the output."]),
|
||||
@@ -373,6 +376,8 @@ optDescr =
|
||||
"Enable or disable global grammar optimization. This could significantly reduce the size of the final PGF file",
|
||||
Option [] ["split-pgf"] (NoArg (splitPGF True))
|
||||
"Split the PGF into one file per language. This allows the runtime to load only individual languages",
|
||||
Option [] ["stem"] (onOff (toggleOptimize OptStem) True) "Perform stem-suffix analysis (default on).",
|
||||
Option [] ["cse"] (onOff (toggleOptimize OptCSE) True) "Perform common sub-expression elimination (default on).",
|
||||
Option [] ["cfg"] (ReqArg cfgTransform "TRANS") "Enable or disable specific CFG transformations. TRANS = merge, no-merge, bottomup, no-bottomup, ...",
|
||||
Option [] ["heuristic_search_factor"] (ReqArg (readDouble (\d o -> o { optHeuristicFactor = Just d })) "FACTOR") "Set the heuristic search factor for statistical parsing",
|
||||
Option [] ["case_sensitive"] (onOff (\v -> set $ \o -> o{optCaseSensitive=v}) True) "Set the parser in case-sensitive/insensitive mode [sensitive by default]",
|
||||
@@ -426,6 +431,7 @@ optDescr =
|
||||
gfDocuRoot x = set $ \o -> o { optDocumentRoot = Just x }
|
||||
recomp x = set $ \o -> o { optRecomp = x }
|
||||
probsFile x = set $ \o -> o { optProbsFile = Just x }
|
||||
labelsFile x = set $ \o -> o { optLabelsFile = Just x }
|
||||
|
||||
name x = set $ \o -> o { optName = Just x }
|
||||
addLibDir x = set $ \o -> o { optLibraryPath = x:optLibraryPath o }
|
||||
@@ -446,6 +452,8 @@ optDescr =
|
||||
optimize_pgf x = set $ \o -> o { optOptimizePGF = x }
|
||||
splitPGF x = set $ \o -> o { optSplitPGF = x }
|
||||
|
||||
toggleOptimize x b = set $ setOptimization' x b
|
||||
|
||||
cfgTransform x = let (x', b) = case x of
|
||||
'n':'o':'-':rest -> (rest, False)
|
||||
_ -> (x, True)
|
||||
|
||||
@@ -43,6 +43,7 @@ import GF.Infra.UseIO(readBinaryFile,writeBinaryFile,ePutStrLn)
|
||||
import GF.Infra.SIO(captureSIO)
|
||||
import GF.Data.Utilities(apSnd,mapSnd)
|
||||
import qualified PGFService as PS
|
||||
import qualified ExampleService as ES
|
||||
import Data.Version(showVersion)
|
||||
import Paths_gf(getDataDir,version)
|
||||
import GF.Infra.BuildInfo (buildInfo)
|
||||
@@ -170,6 +171,7 @@ handle logLn documentroot state0 cache execute1 stateVar
|
||||
(_ ,_ ,".pgf") -> do --debug $ "PGF service: "++path
|
||||
wrapCGI $ PS.cgiMain' cache path
|
||||
(dir,"grammars.cgi",_ ) -> grammarList dir (decoded qs)
|
||||
(dir ,"exb.fcgi" ,_ ) -> wrapCGI $ ES.cgiMain' root dir (PS.pgfCache cache)
|
||||
_ -> serveStaticFile rpath path
|
||||
where path = translatePath rpath
|
||||
_ -> return $ resp400 upath
|
||||
@@ -207,7 +209,7 @@ handle logLn documentroot state0 cache execute1 stateVar
|
||||
((_,(value,_)):qs1,qs2) -> do put_qs (qs1++qs2)
|
||||
return value
|
||||
_ -> err $ resp400 $ "no "++field++" in request"
|
||||
|
||||
|
||||
inDir ok = cd =<< look "dir"
|
||||
where
|
||||
cd ('/':dir@('t':'m':'p':_)) =
|
||||
|
||||
@@ -74,12 +74,15 @@ pgfToCFG pgf lang = mkCFG (showCId (lookStartCat pgf)) extCats (startRules ++ co
|
||||
|
||||
ruleToCFRule :: (FId,Production) -> [CFRule]
|
||||
ruleToCFRule (c,PApply funid args) =
|
||||
[Rule (fcatToCat c l) (mkRhs row) (profilesToTerm [fixProfile row n | n <- [0..length args-1]])
|
||||
[Rule (fcatToCat c l) (mkRhs row) term
|
||||
| (l,seqid) <- Array.assocs rhs
|
||||
, let row = sequences cnc ! seqid
|
||||
, not (containsLiterals row)]
|
||||
, not (containsLiterals row)
|
||||
, f <- fns
|
||||
, let term = profilesToTerm f [fixProfile row n | n <- [0..length args-1]]
|
||||
]
|
||||
where
|
||||
CncFun f rhs = cncfuns cnc ! funid
|
||||
CncFun fns rhs = cncfuns cnc ! funid
|
||||
|
||||
mkRhs :: Array DotPos Symbol -> [CFSymbol]
|
||||
mkRhs = concatMap symbolToCFSymbol . Array.elems
|
||||
@@ -111,8 +114,8 @@ pgfToCFG pgf lang = mkCFG (showCId (lookStartCat pgf)) extCats (startRules ++ co
|
||||
getPos (SymLit j _) = [j]
|
||||
getPos _ = []
|
||||
|
||||
profilesToTerm :: [Profile] -> CFTerm
|
||||
profilesToTerm ps = CFObj f (zipWith profileToTerm argTypes ps)
|
||||
profilesToTerm :: CId -> [Profile] -> CFTerm
|
||||
profilesToTerm f ps = CFObj f (zipWith profileToTerm argTypes ps)
|
||||
where (argTypes,_) = catSkeleton $ lookType (abstract pgf) f
|
||||
|
||||
profileToTerm :: CId -> Profile -> CFTerm
|
||||
|
||||
553
src/example-based/ExampleDemo.hs
Normal file
@@ -0,0 +1,553 @@
|
||||
module ExampleDemo (Environ,initial,getNext, provideExample, testThis,mkFuncWithArg,searchGoodTree,isMeta)
|
||||
where
|
||||
|
||||
import PGF
|
||||
--import System.IO
|
||||
import Data.List
|
||||
--import Control.Monad
|
||||
import qualified Data.Map as Map
|
||||
--import qualified Data.IntMap as IntMap
|
||||
import qualified Data.Set as Set
|
||||
import Data.Maybe
|
||||
--import System.Environment (getArgs)
|
||||
import System.Random (RandomGen) --newStdGen
|
||||
|
||||
|
||||
type MyType = CId -- name of the categories from the program
|
||||
type ConcType = CId -- categories from the resource grammar, that we parse on
|
||||
type MyFunc = CId -- functions that we need to implement
|
||||
--type FuncWithArg = ((MyFunc, MyType), Expr) -- function with arguments
|
||||
type InterInstr = [String] -- lincats that were generated but not written to the file
|
||||
|
||||
|
||||
|
||||
data FuncWithArg = FuncWithArg
|
||||
{getName :: MyFunc, -- name of the function to generate
|
||||
getType :: MyType, -- return type of the function
|
||||
getTypeArgs :: [MyType] -- types of arguments
|
||||
}
|
||||
deriving (Show,Eq,Ord)
|
||||
|
||||
-- we assume that it's for English for the moment
|
||||
|
||||
|
||||
type TypeMap = Map.Map MyType ConcType -- mapping found from a file
|
||||
|
||||
type ConcMap = Map.Map MyFunc Expr -- concrete expression after parsing
|
||||
|
||||
data Environ = Env {getTypeMap :: TypeMap, -- mapping between a category in the grammar and a concrete type from RGL
|
||||
getConcMap :: ConcMap, -- concrete expression after parsing
|
||||
getSigs :: Map.Map MyType [FuncWithArg], -- functions for which we have the concrete syntax already with args
|
||||
getAll :: [FuncWithArg] -- all the functions with arguments
|
||||
}
|
||||
|
||||
|
||||
getNext :: Environ -> Environ -> ([MyFunc],[MyFunc])
|
||||
getNext env example_env =
|
||||
let sgs = getSigs env
|
||||
allfuncs = getAll env
|
||||
names = Set.fromList $ map getName $ concat $ Map.elems sgs
|
||||
exampleable = filter (\x -> (isJust $ getNameExpr x env)
|
||||
&&
|
||||
(not $ Set.member x names) -- maybe drop this if you want to also rewrite from examples...
|
||||
) $ map getName allfuncs
|
||||
testeable = filter (\x -> (isJust $ getNameExpr x env )
|
||||
&&
|
||||
(Set.member x names)
|
||||
) $ map getName allfuncs
|
||||
|
||||
in (exampleable,testeable)
|
||||
|
||||
|
||||
provideExample :: RandomGen gen => gen -> Environ -> MyFunc -> PGF -> PGF -> Language -> Maybe (Expr,String)
|
||||
provideExample gen env myfunc parsePGF pgfFile lang =
|
||||
fmap giveExample $ getNameExpr myfunc env
|
||||
where
|
||||
giveExample e_ =
|
||||
let newexpr = head $ generateRandomFromDepth gen pgfFile e_ (Just 5) -- change here with the new random generator
|
||||
ty = getType $ head $ filter (\x -> getName x == myfunc) $ getAll env
|
||||
embeddedExpr = maybe "" (\x -> ", as in: " ++ q (linearize pgfFile lang x)) (embedInStart (getAll env) (Map.fromList [(ty,e_)]))
|
||||
lexpr = linearize pgfFile lang newexpr
|
||||
q s = sq++s++sq
|
||||
sq = "\""
|
||||
in (newexpr,q lexpr ++ embeddedExpr)
|
||||
-- question, you need the IO monad for the random generator, how to do otherwise ??
|
||||
-- question can you make the expression bold/italic - somehow distinguishable from the rest ?
|
||||
|
||||
|
||||
|
||||
testThis :: Environ -> MyFunc -> PGF -> Language -> Maybe String
|
||||
testThis env myfunc parsePGF lang =
|
||||
fmap (linearize parsePGF lang . mapToResource env . llin env) $
|
||||
getNameExpr myfunc env
|
||||
|
||||
|
||||
-- we assume that even the functions linearized by the user will still be in getSigs along with their linearization
|
||||
|
||||
|
||||
-- fill in the blancs of an expression that we want to linearize for testing purposes
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
llin :: Environ -> Expr -> Expr
|
||||
llin env expr =
|
||||
let
|
||||
(id,args) = fromJust $ unApp expr
|
||||
--cexpr = fromJust $ Map.lookup id (getConcMap env)
|
||||
in
|
||||
if any isMeta args
|
||||
then let
|
||||
sigs = concat $ Map.elems $ getSigs env
|
||||
tys = findExprWhich sigs id
|
||||
in replaceConcArg 1 tys expr env
|
||||
else mkApp id $ map (llin env) args
|
||||
|
||||
|
||||
-- argument of the meta variable to replace, list of arguments left, expression to replace, environment, current replace expression
|
||||
replaceConcArg :: Int -> [MyType] -> Expr -> Environ -> Expr
|
||||
replaceConcArg i [] expr env = expr
|
||||
replaceConcArg i (t:ts) expr env = -- TO DO : insert randomness here !!
|
||||
let ss = fromJust $ Map.lookup t $ getSigs env
|
||||
args = filter (null . getTypeArgs) ss
|
||||
finArg = if null args then let l = last ss in llin env (mkApp (getName l) [mkMeta j | j <- [1..(length $ getTypeArgs l)]])
|
||||
else mkApp (getName $ last args) []
|
||||
in
|
||||
let newe = replaceOne i finArg expr
|
||||
in replaceConcArg (i+1) ts newe env
|
||||
|
||||
-- replace a certain metavariable with a certain expression in another expression - return updated expression
|
||||
replaceOne :: Int -> Expr -> Expr -> Expr
|
||||
replaceOne i erep expr =
|
||||
if isMeta expr && ((fromJust $ unMeta expr) == i)
|
||||
then erep
|
||||
else if isMeta expr then expr
|
||||
else let (id,args) = fromJust $ unApp expr
|
||||
in
|
||||
mkApp id $ map (replaceOne i erep) args
|
||||
|
||||
|
||||
findExprWhich :: [FuncWithArg] -> MyFunc -> [MyType]
|
||||
findExprWhich lst f = getTypeArgs $ head $ filter (\x -> getName x == f) lst
|
||||
|
||||
|
||||
mapToResource :: Environ -> Expr -> Expr
|
||||
mapToResource env expr =
|
||||
let (id,args) = maybe (error $ "tried to unwrap " ++ showExpr [] expr) (\x -> x) (unApp expr)
|
||||
cmap = getConcMap env
|
||||
cexp = maybe (error $ "didn't find " ++ showCId id ++ " in "++ show cmap) (\x -> x) (Map.lookup id cmap)
|
||||
in
|
||||
if null args then cexp
|
||||
else let newargs = map (mapToResource env) args
|
||||
in replaceAllArgs cexp 1 newargs
|
||||
where
|
||||
replaceAllArgs expr i [] = expr
|
||||
replaceAllArgs expr i (x:xs) = replaceAllArgs (replaceOne i x expr) (i+1) xs
|
||||
|
||||
|
||||
|
||||
-----------------------------------------------
|
||||
|
||||
-- embed expression in another one from the start category
|
||||
|
||||
embedInStart :: [FuncWithArg] -> Map.Map MyType Expr -> Maybe Expr
|
||||
embedInStart fss cs =
|
||||
let currset = Map.toList cs
|
||||
nextset = Map.fromList $ concat [ if elem myt (getTypeArgs farg)
|
||||
then connectWithArg (myt,exp) farg else []
|
||||
| (myt,exp) <- currset, farg <- fss]
|
||||
nextmap = Map.union cs nextset
|
||||
maybeExpr = Map.lookup startCateg nextset
|
||||
in if isNothing maybeExpr then
|
||||
if Map.size nextmap == Map.size cs then Nothing --error $ "could't build " ++ show startCateg ++ "with " ++ show fss
|
||||
else embedInStart fss nextmap
|
||||
else return $ fromJust maybeExpr
|
||||
where
|
||||
connectWithArg (myt,exp) farg =
|
||||
let ind = head $ elemIndices myt (getTypeArgs farg)
|
||||
in [(getType farg, mkApp (getName farg) $ [mkMeta i | i <- [1..ind]] ++ [exp] ++ [mkMeta i | i <- [(ind + 1)..((length $ getTypeArgs farg) - 1)]])]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
-----------------------------------------------
|
||||
{-
|
||||
updateConcMap :: Environ -> MyFunc -> Expr -> Environ
|
||||
updateConcMap env myf expr =
|
||||
Env (getTypeMap env) (Map.insert myf expr (getConcMap env)) (getSigs env) (getAll env)
|
||||
|
||||
|
||||
updateInterInstr :: Environ -> MyType -> FuncWithArg -> Environ
|
||||
updateInterInstr env myt myf =
|
||||
let ii = getSigs env
|
||||
newInterInstr =
|
||||
maybe (Map.insert myt [myf] ii) (\x -> Map.insert myt (myf:x) ii) $ Map.lookup myt ii
|
||||
in Env (getTypeMap env) (getConcMap env) newInterInstr (getAll env)
|
||||
|
||||
|
||||
putSignatures :: Environ -> [FuncWithArg] -> Environ
|
||||
putSignatures env fss =
|
||||
Env (getTypeMap env) (getConcMap env) (mkSigs fss) (getAll env)
|
||||
|
||||
|
||||
updateEnv :: Environ -> FuncWithArg -> MyType -> Expr -> Environ
|
||||
updateEnv env myf myt expr =
|
||||
let ii = getSigs env
|
||||
nn = getName myf
|
||||
newInterInstr =
|
||||
maybe (Map.insert myt [myf] ii) (\x -> Map.insert myt (myf:x) ii) $ Map.lookup myt ii
|
||||
in Env (getTypeMap env) (Map.insert nn expr (getConcMap env)) newInterInstr (getAll env)
|
||||
-}
|
||||
|
||||
mkSigs :: [FuncWithArg] -> Map.Map MyType [FuncWithArg]
|
||||
mkSigs fss = Map.fromListWith (++) $ zip (map getType fss) (map (\x -> [x]) fss)
|
||||
|
||||
|
||||
|
||||
{------------------------------------
|
||||
lang :: String
|
||||
lang = "Eng"
|
||||
|
||||
|
||||
parseLang :: Language
|
||||
parseLang = fromJust $ readLanguage "ParseEng"
|
||||
|
||||
|
||||
parsePGFfile :: String
|
||||
parsePGFfile = "ParseEngAbs.pgf"
|
||||
------------------------------------}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
searchGoodTree :: Environ -> Expr -> [Expr] -> IO (Maybe (Expr,Expr))
|
||||
searchGoodTree env expr [] = return Nothing
|
||||
searchGoodTree env expr (e:es) =
|
||||
do val <- debugReplaceArgs expr e env
|
||||
maybe (searchGoodTree env expr es) (\x -> return $ Just (x,e)) val
|
||||
|
||||
|
||||
|
||||
getNameExpr :: MyFunc -> Environ -> Maybe Expr
|
||||
getNameExpr myfunc env =
|
||||
let allfunc = filter (\x -> getName x == myfunc) $ getAll env
|
||||
in
|
||||
if null allfunc then Nothing
|
||||
else getExpr (head allfunc) env
|
||||
|
||||
-- find an expression to generate where we have all the other elements available
|
||||
getExpr :: FuncWithArg -> Environ -> Maybe Expr
|
||||
getExpr farg env =
|
||||
let tys = getTypeArgs farg
|
||||
ctx = getSigs env
|
||||
lst = getConcTypes ctx tys 1
|
||||
in if (all isJust lst) then Just $ mkApp (getName farg) (map fromJust lst)
|
||||
else Nothing
|
||||
where getConcTypes context [] i = []
|
||||
getConcTypes context (ty:types) i =
|
||||
let pos = Map.lookup ty context
|
||||
in
|
||||
if isNothing pos || (null $ fromJust pos) then [Nothing]
|
||||
else
|
||||
let mm = last $ fromJust pos
|
||||
mmargs = getTypeArgs mm
|
||||
newi = i + length mmargs - 1
|
||||
lst = getConcTypes (Map.insert ty (init $ (fromJust pos)) context) types (newi+1)
|
||||
in
|
||||
if (all isJust lst) then -- i..newi
|
||||
(Just $ mkApp (getName mm) [mkMeta j | j <- [1..(length mmargs)]]) : lst
|
||||
else [Nothing]
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
-- only covers simple expressions with meta variables, not the rest...
|
||||
isGeneralizationOf :: Expr -> Expr -> Bool
|
||||
isGeneralizationOf genExpr testExpr =
|
||||
if isMeta genExpr then True
|
||||
else if isMeta testExpr then False
|
||||
else let genUnwrap = unApp genExpr
|
||||
testUnwrap = unApp testExpr
|
||||
in if isNothing genUnwrap || isNothing testUnwrap then False -- see if you can generalize here
|
||||
else let (gencid, genargs) = fromJust genUnwrap
|
||||
(testcid, testargs) = fromJust testUnwrap
|
||||
in
|
||||
(gencid == testcid) && (length genargs == length testargs)
|
||||
&& (and [isGeneralizationOf g t | (g,t) <- (zip genargs testargs)])
|
||||
|
||||
{-do lst <- getConcTypes context types (i+1)
|
||||
return $ mkMeta i : lst -}
|
||||
|
||||
debugReplaceArgs :: Expr -> Expr -> Environ -> IO (Maybe Expr)
|
||||
debugReplaceArgs aexpr cexpr env =
|
||||
if isNothing $ unApp aexpr then return Nothing
|
||||
else if any isNothing $ map unApp $ snd $ fromJust $ unApp aexpr then return Nothing
|
||||
else
|
||||
let args = map (fst.fromJust.unApp) $ snd $ fromJust $ unApp aexpr
|
||||
concExprs = map (\x -> fromJust $ Map.lookup x $ getConcMap env) args
|
||||
in startReplace 1 cexpr concExprs
|
||||
where
|
||||
startReplace i cex [] = return $ Just cex
|
||||
startReplace i cex (a:as) = do val <- debugReplaceConc cex i a
|
||||
maybe ( --do putStrLn $ "didn't find "++ showExpr [] a ++ " in " ++showExpr [] cexpr
|
||||
return Nothing)
|
||||
(\x -> --do putStrLn $ "found it, the current expression is "++ showExpr [] x
|
||||
startReplace (i+1) x as)
|
||||
val
|
||||
|
||||
debugReplaceConc :: Expr -> Int -> Expr -> IO (Maybe Expr)
|
||||
debugReplaceConc expr i e =
|
||||
let (newe,isThere) = searchArg expr
|
||||
in if isThere then return $ Just newe else return $ Nothing
|
||||
where
|
||||
searchArg e_ =
|
||||
if isGeneralizationOf e e_ then (mkMeta i, True)
|
||||
else maybe (e_,False) (\(cid,args) -> let repargs = map searchArg args
|
||||
in (mkApp cid (map fst repargs), or $ map snd repargs)) $ unApp e_
|
||||
|
||||
|
||||
{-
|
||||
-- replaceArgs : Original expression to parse (from abstract syntax) -> Concrete expression (parsed)
|
||||
replaceArgs :: Expr -> Expr -> Environ -> Maybe Expr
|
||||
replaceArgs aexpr cexpr env =
|
||||
if isNothing $ unApp aexpr then error $ "could't unwrap this "++ show aexpr
|
||||
else if any isNothing $ map unApp $ snd $ fromJust $ unApp aexpr then error $ "couldn't unwrap more this : "++ show aexpr
|
||||
else
|
||||
let args = map (fst.fromJust.unApp) $ snd $ fromJust $ unApp aexpr
|
||||
concExprs = map (\x -> fromJust $ Map.lookup x $ getConcMap env) args
|
||||
in startReplace 1 cexpr concExprs
|
||||
where
|
||||
startReplace i cex [] = return cex
|
||||
startReplace i cex (a:as) = maybe Nothing (\x -> startReplace (i+1) x as) $ replaceConc cex i a
|
||||
|
||||
|
||||
|
||||
replaceConc :: Expr -> Int -> Expr -> Maybe Expr
|
||||
replaceConc expr i e =
|
||||
let (newe,isThere) = searchArg expr
|
||||
in if isThere then return newe else Nothing
|
||||
where
|
||||
searchArg e_ =
|
||||
if isGeneralizationOf e e_ then (mkMeta i, True)
|
||||
else maybe (e_,False) (\(cid,args) -> let repargs = map searchArg args
|
||||
in (mkApp cid (map fst repargs), or $ map snd repargs)) $ unApp e_
|
||||
|
||||
|
||||
|
||||
writeResults :: Environ -> String -> IO ()
|
||||
writeResults env fileName =
|
||||
let cmap = getConcMap env
|
||||
lincats = unlines $ map (\(x,y) -> "lincat " ++ showCId x ++ " = " ++ showCId y ++ " ; " ) $ Map.toList $ getTypeMap env
|
||||
sigs = unlines $ map
|
||||
(\x -> let n = getName x
|
||||
no = length $ getTypeArgs x
|
||||
oargs = unwords $ ("lin " ++ showCId n) : ["o"++show i | i <- [1..no]]
|
||||
in (oargs ++ " = " ++ (simpleReplace $ showExpr [] $ fromJust $ Map.lookup n cmap) ++ " ; ")) $ concat $ Map.elems $ getSigs env
|
||||
in
|
||||
writeFile fileName ("\n" ++ lincats ++ "\n\n" ++ sigs)
|
||||
|
||||
|
||||
simpleReplace :: String -> String
|
||||
simpleReplace [] = []
|
||||
simpleReplace ('?':xs) = 'o' : simpleReplace xs
|
||||
simpleReplace (x:xs) = x : simpleReplace xs
|
||||
-}
|
||||
|
||||
isMeta :: Expr -> Bool
|
||||
isMeta = isJust.unMeta
|
||||
|
||||
-- works with utf-8 characters also, as it seems
|
||||
|
||||
|
||||
mkFuncWithArg :: ((CId,CId),[CId]) -> FuncWithArg
|
||||
mkFuncWithArg ((c1,c2),cids) = FuncWithArg c1 c2 cids
|
||||
|
||||
|
||||
---------------------------------------------------------------------------------
|
||||
|
||||
initial :: TypeMap -> ConcMap -> [FuncWithArg] -> [FuncWithArg] -> Environ
|
||||
initial tm cm fss allfs = Env tm cm (mkSigs fss) allfs
|
||||
{-
|
||||
testInit :: [FuncWithArg] -> Environ
|
||||
testInit allfs = initial lTypes Map.empty [] allfs
|
||||
|
||||
lTypes = Map.fromList [(mkCId "Comment", mkCId "S"),(mkCId "Item", mkCId "NP"), (mkCId "Kind", mkCId "CN"), (mkCId "Quality", mkCId "AP")]
|
||||
-}
|
||||
startCateg = mkCId "Comment"
|
||||
-- question about either to give the startcat or not ...
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
----------------------------------------------------------------------------------------------------------
|
||||
{-
|
||||
main =
|
||||
do args <- getArgs
|
||||
case args of
|
||||
[pgfFile] ->
|
||||
do pgf <- readPGF pgfFile
|
||||
parsePGF <- readPGF parsePGFfile
|
||||
fsWithArg <- forExample pgf
|
||||
let funcsWithArg = map (map mkFuncWithArg) fsWithArg
|
||||
let morpho = buildMorpho parsePGF parseLang
|
||||
let fss = concat funcsWithArg
|
||||
let fileName = takeWhile (/='.') pgfFile ++ lang ++ ".gf"
|
||||
env <- start parsePGF pgf morpho (testInit fss) fss
|
||||
putStrLn $ "Should I write the results to a file ? yes/no"
|
||||
ans <-getLine
|
||||
if ans == "yes" then do writeResults env fileName
|
||||
putStrLn $ "Wrote file " ++ fileName
|
||||
else return ()
|
||||
_ -> fail "usage : Testing <path-to-pgf> "
|
||||
|
||||
|
||||
|
||||
start :: PGF -> PGF -> Morpho -> Environ -> [FuncWithArg] -> IO Environ
|
||||
start parsePGF pgfFile morpho env lst =
|
||||
do putStrLn "Do you want examples from another language ? (no/concrete syntax name otherwise)"
|
||||
ans1 <- getLine
|
||||
putStrLn "Do you want testing mode ? (yes/no)"
|
||||
ans2 <- getLine
|
||||
case (ans1,ans2) of
|
||||
("no","no") -> do putStrLn "no extra language, just the abstract syntax tree"
|
||||
interact env lst False Nothing
|
||||
(_,"no") -> interact env lst False (readLanguage ans1)
|
||||
("no","yes") -> do putStrLn "no extra language, just the abstract syntax tree"
|
||||
interact env lst True Nothing
|
||||
(_,"yes") -> interact env lst True (readLanguage ans1)
|
||||
("no",_) -> do putStrLn "no extra language, just the abstract syntax tree"
|
||||
putStrLn $ "I assume you don't want the testing mode ... "
|
||||
interact env lst False Nothing
|
||||
(_,_) -> do putStrLn $ "I assume you don't want the testing mode ... "
|
||||
interact env lst False (readLanguage ans1)
|
||||
where
|
||||
|
||||
interact environ [] func _ = return environ
|
||||
interact environ (farg:fargs) boo otherLang =
|
||||
do
|
||||
maybeEnv <- basicInter farg otherLang environ boo
|
||||
if isNothing maybeEnv then return environ
|
||||
else interact (fromJust maybeEnv) fargs boo otherLang
|
||||
|
||||
basicInter farg js environ False =
|
||||
let e_ = getExpr farg environ in
|
||||
if isNothing e_ then return $ Just environ
|
||||
else parseAndBuild farg js environ (getType farg) e_ Nothing
|
||||
basicInter farg js environ True =
|
||||
let (e_,e_test) = get2Expr farg environ in
|
||||
if isNothing e_ then return $ Just environ
|
||||
else if isNothing e_test then do putStrLn $ "not enough arguments "++ (showCId $ getName farg)
|
||||
parseAndBuild farg js environ (getType farg) e_ Nothing
|
||||
else parseAndBuild farg js environ (getType farg) e_ e_test
|
||||
|
||||
-- . head . generateRandomFrom gen2 pgfFile
|
||||
parseAndBuild farg js environ ty e_ e_test =
|
||||
do let expr = fromJust e_
|
||||
gen1 <- newStdGen
|
||||
gen2 <- newStdGen
|
||||
let newexpr = head $ generateRandomFrom gen1 pgfFile expr
|
||||
let embeddedExpr = maybe "***" (showExpr [] ) (embedInStart (getAll environ) (Map.fromList [(ty,expr)]))
|
||||
let lexpr = if isNothing js then "" else "\n-- " ++ linearize pgfFile (fromJust js) newexpr ++ " --"
|
||||
putStrLn $ "Give an example for " ++ (showExpr [] expr)
|
||||
++ lexpr ++ "and now"
|
||||
++ "\n\nas in " ++ embeddedExpr ++ "\n\n"
|
||||
--
|
||||
ex <- getLine
|
||||
if (ex == ":q") then return Nothing
|
||||
else
|
||||
let ctype = fromJust $ Map.lookup (getType farg) (getTypeMap environ) in
|
||||
do env' <- decypher farg ex expr environ (fromJust $ readType $ showCId ctype) e_test
|
||||
return (Just env')
|
||||
|
||||
decypher farg ex expr environ ty e_test =
|
||||
--do putStrLn $ "We need to parse " ++ ex ++ " as " ++ show ctype
|
||||
let pTrees = parse parsePGF (fromJust $ readLanguage "ParseEng") ty ex in
|
||||
pickTree farg expr environ ex e_test pTrees
|
||||
|
||||
-- putStrLn $ "And now for testing, \n is this also correct yes/no \n ## " ++ (linearize parsePGF parseLang $ mapToResource newenv $ llin newenv e_test) ++ " ##"
|
||||
|
||||
-- select the right tree among the options given by the parser
|
||||
pickTree farg expr environ ex e_test [] =
|
||||
let miswords = morphoMissing morpho (words ex)
|
||||
in
|
||||
if null miswords then do putStrLn $ "all words known, but some syntactic construction is not covered by the grammar..."
|
||||
return environ
|
||||
else do putStrLn $ "the following words are unknown, please add them to the lexicon: " ++ show miswords
|
||||
return environ
|
||||
pickTree farg expr environ ex e_test [tree] =
|
||||
do val <- searchGoodTree environ expr [tree] -- maybe order here after the probabilities for better precision
|
||||
maybe (do putStrLn $ "none of the trees is consistent with the rest of the grammar, please check arguments "
|
||||
return environ)
|
||||
(\(x,newtree) -> let newenv = updateEnv environ farg (getType farg) x in
|
||||
do putStrLn $ "the result is "++showExpr [] x
|
||||
newtestenv <- testTest newenv e_test -- question ? should it belong there - there is just one possibility of a tree...
|
||||
return newenv) val
|
||||
pickTree farg expr environ ex e_test parseTrees =
|
||||
do putStrLn $ "There is more than one possibility, do you want to choose the right tree yourself ? yes/no "
|
||||
putStr " >"
|
||||
ans <- getLine
|
||||
if ans == "yes" then do pTree <- chooseRightTree parseTrees
|
||||
processTree farg environ expr pTree e_test
|
||||
else processTree farg environ expr parseTrees e_test
|
||||
|
||||
-- introduce testing function, if it doesn't work, then reparse, take that tree
|
||||
testTree envv e_test = return envv -- TO DO - add testing here
|
||||
|
||||
testTest envv Nothing = return envv
|
||||
testTest envv (Just exxpr) = testTree envv exxpr
|
||||
|
||||
|
||||
-- allows the user to pick his own tree
|
||||
chooseRightTree trees = return trees -- TO DO - add something clever here
|
||||
|
||||
-- selects the tree from where one can abstract over the original arguments
|
||||
processTree farg environ expr lsTrees e_test =
|
||||
let trmes = if length lsTrees == 1 then "the tree is not consistent " else "none of the trees is consistent " in
|
||||
do val <- searchGoodTree environ expr lsTrees
|
||||
maybe (do putStrLn $ trmes ++ "with the rest of the grammar, please check arguments! "
|
||||
return environ)
|
||||
(\(x,newtree) -> let newenv = updateEnv environ farg (getType farg) x in
|
||||
do putStrLn $ "the result is "++showExpr [] x
|
||||
newtestenv <- testTest newenv e_test
|
||||
return newenv) val
|
||||
|
||||
|
||||
|
||||
-------------------------------
|
||||
|
||||
get2Expr :: FuncWithArg -> Environ -> (Maybe Expr, Maybe Expr)
|
||||
get2Expr farg env =
|
||||
let tys = getTypeArgs farg
|
||||
ctx = getSigs env
|
||||
(lst1,lst2) = getConcTypes2 ctx tys 1
|
||||
arg1 = if (all isJust lst1) then Just $ mkApp (getName farg) (map fromJust lst1) else Nothing
|
||||
arg2 = if (all isJust lst2) then Just $ mkApp (getName farg) (map fromJust lst2) else Nothing
|
||||
in if arg1 == arg2 then (arg1, Nothing)
|
||||
else (arg1,arg2)
|
||||
where
|
||||
getConcTypes2 context [] i = ([],[])
|
||||
getConcTypes2 context (ty:types) i =
|
||||
let pos = Map.lookup ty context
|
||||
in
|
||||
if isNothing pos || (null $ fromJust pos) then ([Nothing],[Nothing])
|
||||
else
|
||||
let (mm,tt) = (last $ fromJust pos, head $ fromJust pos)
|
||||
mmargs = getTypeArgs mm
|
||||
newi = i + length mmargs - 1
|
||||
(lst1,lst2) = getConcTypes2 (Map.insert ty (init (fromJust pos)) context) types (newi+1)
|
||||
ttargs = getTypeArgs tt
|
||||
newtti = i + length ttargs - 1
|
||||
fstArg = if (all isJust lst1) then -- i..newi
|
||||
(Just $ mkApp (getName mm) [mkMeta j | j <- [1..(length mmargs)]]) : lst1
|
||||
else [Nothing]
|
||||
sndArg = if (all isJust lst2) then
|
||||
(Just $ mkApp (getName tt) [mkMeta j | j <- [1..(length ttargs)]]) : lst2
|
||||
else [Nothing]
|
||||
in
|
||||
(fstArg,sndArg)
|
||||
|
||||
|
||||
-}
|
||||
|
||||
128
src/example-based/ExampleService.hs
Normal file
@@ -0,0 +1,128 @@
|
||||
module ExampleService(cgiMain,cgiMain',newPGFCache) where
|
||||
import System.Random(newStdGen)
|
||||
import System.FilePath((</>),makeRelative)
|
||||
import Data.Map(fromList)
|
||||
import Data.Char(isDigit)
|
||||
import Data.Maybe(fromJust)
|
||||
import qualified Codec.Binary.UTF8.String as UTF8 (decodeString)
|
||||
import PGF
|
||||
import GF.Compile.ToAPI
|
||||
import Network.CGI
|
||||
import Text.JSON
|
||||
import CGIUtils
|
||||
import Cache
|
||||
import qualified ExampleDemo as E
|
||||
|
||||
newPGFCache = newCache readPGF
|
||||
|
||||
|
||||
cgiMain :: Cache PGF -> CGI CGIResult
|
||||
cgiMain = handleErrors . handleCGIErrors . cgiMain' "." "."
|
||||
|
||||
cgiMain' root cwd cache =
|
||||
do command <- getInp "command"
|
||||
environ <- parseEnviron =<< getInp "state"
|
||||
case command of
|
||||
"possibilities" -> doPossibilities environ
|
||||
"provide_example" -> doProvideExample root cwd cache environ
|
||||
"abstract_example" -> doAbstractExample cwd cache environ
|
||||
"test_function" -> doTestFunction cwd cache environ
|
||||
_ -> throwCGIError 400 ("Unknown command: "++command) []
|
||||
|
||||
doPossibilities environ =
|
||||
do example_environ <- parseEnviron =<< getInp "example_state"
|
||||
outputJSONP (E.getNext environ example_environ)
|
||||
|
||||
doProvideExample root cwd cache environ =
|
||||
do Just lang <- readInput "lang"
|
||||
fun <- getCId "fun"
|
||||
parsePGF <- readParsePGF cwd cache
|
||||
let adjpath path = root</>makeRelative "/" (makeRelative root cwd</>path)
|
||||
pgf <- liftIO . readCache cache . adjpath =<< getInp "grammar"
|
||||
gen <- liftIO newStdGen
|
||||
let Just (e,s) = E.provideExample gen environ fun parsePGF pgf lang
|
||||
res = (showExpr [] e,s)
|
||||
liftIO $ logError $ "proveExample ... = "++show res
|
||||
outputJSONP res
|
||||
|
||||
doAbstractExample cwd cache environ =
|
||||
do example <- getInp "input"
|
||||
Just params <- readInput "params"
|
||||
absstr <- getInp "abstract"
|
||||
Just abs <- return $ readExpr absstr
|
||||
liftIO $ logError $ "abstract = "++showExpr [] abs
|
||||
Just cat <- readInput "cat"
|
||||
let t = mkType [] cat []
|
||||
parsePGF <- readParsePGF cwd cache
|
||||
let lang:_ = languages parsePGF
|
||||
ae <- liftIO $ abstractExample parsePGF environ lang t abs example
|
||||
outputJSONP (fmap (\(e,_)->(exprToAPI (instExpMeta params e),e)) ae)
|
||||
|
||||
abstractExample parsePGF env lang cat abs example =
|
||||
E.searchGoodTree env abs (parse parsePGF lang cat example)
|
||||
|
||||
doTestFunction cwd cache environ =
|
||||
do fun <- getCId "fun"
|
||||
parsePGF <- readParsePGF cwd cache
|
||||
let lang:_ = languages parsePGF
|
||||
Just txt <- return (E.testThis environ fun parsePGF lang)
|
||||
outputJSONP txt
|
||||
|
||||
getCId :: String -> CGI CId
|
||||
getCId name = maybe err return =<< fmap readCId (getInp name)
|
||||
where err = throwCGIError 400 ("Bad "++name) []
|
||||
{-
|
||||
getLimit :: CGI Int
|
||||
getLimit = maybe err return =<< readInput "limit"
|
||||
where err = throwCGIError 400 "Missing/bad limit" []
|
||||
-}
|
||||
|
||||
readParsePGF cwd cache =
|
||||
do parsepgf <- getInp "parser"
|
||||
liftIO $ readCache cache (cwd</>parsepgf)
|
||||
|
||||
parseEnviron s = do state <- liftIO $ readIO s
|
||||
return $ environ state
|
||||
|
||||
getInp name = maybe err (return . UTF8.decodeString) =<< getInput name
|
||||
where err = throwCGIError 400 ("Missing parameter: "++name) []
|
||||
|
||||
|
||||
instance JSON CId where
|
||||
showJSON = showJSON . show
|
||||
readJSON = (readResult =<<) . readJSON
|
||||
|
||||
instance JSON Expr where
|
||||
showJSON = showJSON . showExpr []
|
||||
readJSON = (m2r . readExpr =<<) . readJSON
|
||||
|
||||
m2r = maybe (Error "read failed") Ok
|
||||
|
||||
readResult s = case reads s of
|
||||
(x,r):_ | lex r==[("","")] -> Ok x
|
||||
_ -> Error "read failed"
|
||||
|
||||
--------------------------------------------------------------------------------
|
||||
-- cat lincat fun lin fun cat cat
|
||||
environ :: ([(CId, CId)],[(CId, Expr)],[((CId, CId), [CId])]) -> E.Environ
|
||||
environ (lincats,lins0,funs) =
|
||||
E.initial (fromList lincats) concmap fs allfs
|
||||
where
|
||||
concmap = fromList lins
|
||||
allfs = map E.mkFuncWithArg funs
|
||||
fs = [E.mkFuncWithArg f | f@((fn,_),_)<-funs, fn `elem` cns]
|
||||
cns = map fst lins
|
||||
lins = filter (not . E.isMeta .snd) lins0
|
||||
|
||||
|
||||
instExpMeta :: [CId] -> Expr -> Expr
|
||||
instExpMeta ps = fromJust . readExpr . instMeta ps . showExpr []
|
||||
|
||||
instMeta :: [CId] -> String -> String
|
||||
instMeta ps s =
|
||||
case break (=='?') s of
|
||||
(s1,'?':s2) ->
|
||||
case span isDigit s2 of
|
||||
(s21@(_:_),s22) -> s1++show (ps!!(read s21-1))++instMeta ps s22
|
||||
("",s22) -> s1++'?':instMeta ps s22
|
||||
(_,_) -> s
|
||||
15
src/example-based/exb-fcgi.hs
Normal file
@@ -0,0 +1,15 @@
|
||||
{-# LANGUAGE CPP #-}
|
||||
import Control.Concurrent(forkIO)
|
||||
import Network.FastCGI(runFastCGI,runFastCGIConcurrent')
|
||||
import ExampleService(cgiMain,newPGFCache)
|
||||
|
||||
main = do --stderrToFile logFile
|
||||
fcgiMain =<< newPGFCache
|
||||
|
||||
|
||||
fcgiMain cache =
|
||||
#ifndef mingw32_HOST_OS
|
||||
runFastCGIConcurrent' forkIO 100 (cgiMain cache)
|
||||
#else
|
||||
runFastCGI (cgiMain cache)
|
||||
#endif
|
||||
25
src/example-based/gf-exb.cabal
Normal file
@@ -0,0 +1,25 @@
|
||||
Name: gf-exb
|
||||
Version: 1.0
|
||||
Cabal-version: >= 1.8
|
||||
Build-type: Simple
|
||||
License: GPL
|
||||
Synopsis: Example-based grammar writing for the Grammatical Framework
|
||||
|
||||
executable exb.fcgi
|
||||
main-is: exb-fcgi.hs
|
||||
Hs-source-dirs: . ../server ../compiler ../runtime/haskell
|
||||
other-modules: ExampleService ExampleDemo
|
||||
FastCGIUtils Cache GF.Compile.ToAPI
|
||||
-- and a lot more...
|
||||
ghc-options: -threaded
|
||||
if impl(ghc>=7.0)
|
||||
ghc-options: -rtsopts
|
||||
|
||||
build-depends: base >=4.2 && <5, json, cgi, fastcgi, random,
|
||||
containers, old-time, directory, bytestring, utf8-string,
|
||||
pretty, array, mtl, fst, filepath
|
||||
|
||||
if os(windows)
|
||||
ghc-options: -optl-mwindows
|
||||
else
|
||||
build-depends: unix
|
||||
20
src/example-based/todo.txt
Normal file
@@ -0,0 +1,20 @@
|
||||
|
||||
Editor improvements for example-based grammar writing:
|
||||
+ Remove the same language from the example language menu
|
||||
+ Send the other language environment to getNext
|
||||
- Compile a new .pgf automatically when needed
|
||||
- Update buttons automatically when functions are added or removed
|
||||
- Switch over to using AbsParadigmsEng.pgf instead of the old exprToAPI function
|
||||
|
||||
Editor support for guided construction of linearization functions
|
||||
- enter api expressions by parsing them with AbsParadigmsEng.pgf in minibar
|
||||
- replace simpleParseInput with one that accepts quoted string literals
|
||||
- use lexcode/unlexcode in minibar
|
||||
- better support for literals in minibar (completion info from the PGF
|
||||
library should indicate if literals are acceptable)
|
||||
|
||||
Server support for example-based grammar writing:
|
||||
- Change getNext to use info from the example language
|
||||
- Random generator restricted to defined functions
|
||||
|
||||
- More testing
|
||||
489
src/pgf-binary/PGF/Data/Binary.hs
Normal file
@@ -0,0 +1,489 @@
|
||||
{-# LANGUAGE FlexibleInstances, FlexibleContexts #-}
|
||||
-- | This is a layer on top of "Data.Binary" with its own 'Binary' class
|
||||
-- and customised instances for 'Word', 'Int' and 'Double'.
|
||||
-- The 'Int' and 'Word' instance use a variable-length encoding to save space
|
||||
-- for small numbers. The 'Double' instance uses the standard IEEE754 encoding.
|
||||
module PGF.Data.Binary (
|
||||
|
||||
-- * The Binary class
|
||||
Binary(..)
|
||||
|
||||
-- * The Get and Put monads
|
||||
, Get , Put, runPut
|
||||
|
||||
-- * Useful helpers for writing instances
|
||||
, putWord8 , getWord8 , putWord16be , getWord16be
|
||||
|
||||
-- * Binary serialisation
|
||||
, encode , decode
|
||||
|
||||
-- * IO functions for serialisation
|
||||
, encodeFile , decodeFile
|
||||
|
||||
, encodeFile_ , decodeFile_
|
||||
|
||||
-- * Useful
|
||||
, Word8, Word16
|
||||
|
||||
) where
|
||||
|
||||
|
||||
import Data.Word
|
||||
|
||||
import qualified Data.Binary as Bin
|
||||
import Data.Binary.Put
|
||||
import Data.Binary.Get
|
||||
import Data.Binary.IEEE754 ( putFloat64be, getFloat64be)
|
||||
import Control.Monad
|
||||
import Control.Exception
|
||||
import Foreign
|
||||
import System.IO
|
||||
|
||||
import Data.ByteString.Lazy (ByteString)
|
||||
import qualified Data.ByteString.Lazy as L
|
||||
|
||||
--import Data.Char (chr,ord)
|
||||
--import Data.List (unfoldr)
|
||||
|
||||
-- And needed for the instances:
|
||||
import qualified Data.ByteString as B
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import qualified Data.IntMap as IntMap
|
||||
import qualified Data.IntSet as IntSet
|
||||
--import qualified Data.Ratio as R
|
||||
|
||||
--import qualified Data.Tree as T
|
||||
|
||||
import Data.Array.Unboxed
|
||||
|
||||
------------------------------------------------------------------------
|
||||
|
||||
-- | The @Binary@ class provides 'put' and 'get', methods to encode and
|
||||
-- decode a Haskell value to a lazy ByteString. It mirrors the Read and
|
||||
-- Show classes for textual representation of Haskell types, and is
|
||||
-- suitable for serialising Haskell values to disk, over the network.
|
||||
--
|
||||
-- For parsing and generating simple external binary formats (e.g. C
|
||||
-- structures), Binary may be used, but in general is not suitable
|
||||
-- for complex protocols. Instead use the Put and Get primitives
|
||||
-- directly.
|
||||
--
|
||||
-- Instances of Binary should satisfy the following property:
|
||||
--
|
||||
-- > decode . encode == id
|
||||
--
|
||||
-- That is, the 'get' and 'put' methods should be the inverse of each
|
||||
-- other. A range of instances are provided for basic Haskell types.
|
||||
--
|
||||
class Binary t where
|
||||
-- | Encode a value in the Put monad.
|
||||
put :: t -> Put
|
||||
-- | Decode a value in the Get monad
|
||||
get :: Get t
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- Wrappers to run the underlying monad
|
||||
|
||||
-- | Encode a value using binary serialisation to a lazy ByteString.
|
||||
--
|
||||
encode :: Binary a => a -> ByteString
|
||||
encode = runPut . put
|
||||
{-# INLINE encode #-}
|
||||
|
||||
-- | Decode a value from a lazy ByteString, reconstructing the original structure.
|
||||
--
|
||||
decode :: Binary a => ByteString -> a
|
||||
decode = runGet get
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- Convenience IO operations
|
||||
|
||||
-- | Lazily serialise a value to a file
|
||||
--
|
||||
-- This is just a convenience function, it's defined simply as:
|
||||
--
|
||||
-- > encodeFile f = B.writeFile f . encode
|
||||
--
|
||||
-- So for example if you wanted to compress as well, you could use:
|
||||
--
|
||||
-- > B.writeFile f . compress . encode
|
||||
--
|
||||
encodeFile :: Binary a => FilePath -> a -> IO ()
|
||||
encodeFile f v = L.writeFile f (encode v)
|
||||
|
||||
encodeFile_ :: FilePath -> Put -> IO ()
|
||||
encodeFile_ f m = L.writeFile f (runPut m)
|
||||
|
||||
-- | Lazily reconstruct a value previously written to a file.
|
||||
--
|
||||
-- This is just a convenience function, it's defined simply as:
|
||||
--
|
||||
-- > decodeFile f = return . decode =<< B.readFile f
|
||||
--
|
||||
-- So for example if you wanted to decompress as well, you could use:
|
||||
--
|
||||
-- > return . decode . decompress =<< B.readFile f
|
||||
--
|
||||
decodeFile :: Binary a => FilePath -> IO a
|
||||
decodeFile f = bracket (openBinaryFile f ReadMode) hClose $ \h -> do
|
||||
s <- L.hGetContents h
|
||||
evaluate $ runGet get s
|
||||
|
||||
decodeFile_ :: FilePath -> Get a -> IO a
|
||||
decodeFile_ f m = bracket (openBinaryFile f ReadMode) hClose $ \h -> do
|
||||
s <- L.hGetContents h
|
||||
evaluate $ runGet m s
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- For ground types, the standard instances can be reused,
|
||||
-- but for container types it would imply using
|
||||
-- the standard instances for all types of values in the container...
|
||||
|
||||
instance Binary () where put=Bin.put; get=Bin.get
|
||||
instance Binary Bool where put=Bin.put; get=Bin.get
|
||||
instance Binary Word8 where put=Bin.put; get=Bin.get
|
||||
instance Binary Word16 where put=Bin.put; get=Bin.get
|
||||
instance Binary Char where put=Bin.put; get=Bin.get
|
||||
|
||||
-- -- GF doesn't need these:
|
||||
--instance Binary Ordering where put=Bin.put; get=Bin.get
|
||||
--instance Binary Word32 where put=Bin.put; get=Bin.get
|
||||
--instance Binary Word64 where put=Bin.put; get=Bin.get
|
||||
--instance Binary Int8 where put=Bin.put; get=Bin.get
|
||||
--instance Binary Int16 where put=Bin.put; get=Bin.get
|
||||
--instance Binary Int32 where put=Bin.put; get=Bin.get
|
||||
|
||||
--instance Binary Int64 where put=Bin.put; get=Bin.get -- needed by instance Binary ByteString
|
||||
|
||||
------------------------------------------------------------------------
|
||||
|
||||
-- Words are written as sequence of bytes. The last bit of each
|
||||
-- byte indicates whether there are more bytes to be read
|
||||
instance Binary Word where
|
||||
put i | i <= 0x7f = do put a
|
||||
| i <= 0x3fff = do put (a .|. 0x80)
|
||||
put b
|
||||
| i <= 0x1fffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put c
|
||||
| i <= 0xfffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put d
|
||||
-- -- #if WORD_SIZE_IN_BITS < 64
|
||||
| otherwise = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put e
|
||||
{-
|
||||
-- Restricted to 32 bits even on 64-bit systems, so that negative
|
||||
-- Ints are written as 5 bytes instead of 10 bytes (TH 2013-02-13)
|
||||
--#else
|
||||
| i <= 0x7ffffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put e
|
||||
| i <= 0x3ffffffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put (e .|. 0x80)
|
||||
put f
|
||||
| i <= 0x1ffffffffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put (e .|. 0x80)
|
||||
put (f .|. 0x80)
|
||||
put g
|
||||
| i <= 0xffffffffffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put (e .|. 0x80)
|
||||
put (f .|. 0x80)
|
||||
put (g .|. 0x80)
|
||||
put h
|
||||
| i <= 0xffffffffffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put (e .|. 0x80)
|
||||
put (f .|. 0x80)
|
||||
put (g .|. 0x80)
|
||||
put h
|
||||
| i <= 0x7fffffffffffffff = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put (e .|. 0x80)
|
||||
put (f .|. 0x80)
|
||||
put (g .|. 0x80)
|
||||
put (h .|. 0x80)
|
||||
put j
|
||||
| otherwise = do put (a .|. 0x80)
|
||||
put (b .|. 0x80)
|
||||
put (c .|. 0x80)
|
||||
put (d .|. 0x80)
|
||||
put (e .|. 0x80)
|
||||
put (f .|. 0x80)
|
||||
put (g .|. 0x80)
|
||||
put (h .|. 0x80)
|
||||
put (j .|. 0x80)
|
||||
put k
|
||||
-- #endif
|
||||
-}
|
||||
where
|
||||
a = fromIntegral ( i .&. 0x7f) :: Word8
|
||||
b = fromIntegral (shiftR i 7 .&. 0x7f) :: Word8
|
||||
c = fromIntegral (shiftR i 14 .&. 0x7f) :: Word8
|
||||
d = fromIntegral (shiftR i 21 .&. 0x7f) :: Word8
|
||||
e = fromIntegral (shiftR i 28 .&. 0x7f) :: Word8
|
||||
{-
|
||||
f = fromIntegral (shiftR i 35 .&. 0x7f) :: Word8
|
||||
g = fromIntegral (shiftR i 42 .&. 0x7f) :: Word8
|
||||
h = fromIntegral (shiftR i 49 .&. 0x7f) :: Word8
|
||||
j = fromIntegral (shiftR i 56 .&. 0x7f) :: Word8
|
||||
k = fromIntegral (shiftR i 63 .&. 0x7f) :: Word8
|
||||
-}
|
||||
get = do i <- getWord8
|
||||
(if i <= 0x7f
|
||||
then return (fromIntegral i)
|
||||
else do n <- get
|
||||
return $ (n `shiftL` 7) .|. (fromIntegral (i .&. 0x7f)))
|
||||
|
||||
-- Int has the same representation as Word
|
||||
instance Binary Int where
|
||||
put i = put (fromIntegral i :: Word)
|
||||
get = liftM toInt32 (get :: Get Word)
|
||||
where
|
||||
-- restrict to 32 bits (for PGF portability, TH 2013-02-13)
|
||||
toInt32 w = fromIntegral (fromIntegral w::Int32)::Int
|
||||
|
||||
------------------------------------------------------------------------
|
||||
--
|
||||
-- Portable, and pretty efficient, serialisation of Integer
|
||||
--
|
||||
|
||||
-- Fixed-size type for a subset of Integer
|
||||
--type SmallInt = Int32
|
||||
|
||||
-- Integers are encoded in two ways: if they fit inside a SmallInt,
|
||||
-- they're written as a byte tag, and that value. If the Integer value
|
||||
-- is too large to fit in a SmallInt, it is written as a byte array,
|
||||
-- along with a sign and length field.
|
||||
{-
|
||||
instance Binary Integer where
|
||||
|
||||
{-# INLINE put #-}
|
||||
put n | n >= lo && n <= hi = do
|
||||
putWord8 0
|
||||
put (fromIntegral n :: SmallInt) -- fast path
|
||||
where
|
||||
lo = fromIntegral (minBound :: SmallInt) :: Integer
|
||||
hi = fromIntegral (maxBound :: SmallInt) :: Integer
|
||||
|
||||
put n = do
|
||||
putWord8 1
|
||||
put sign
|
||||
put (unroll (abs n)) -- unroll the bytes
|
||||
where
|
||||
sign = fromIntegral (signum n) :: Word8
|
||||
|
||||
{-# INLINE get #-}
|
||||
get = do
|
||||
tag <- get :: Get Word8
|
||||
case tag of
|
||||
0 -> liftM fromIntegral (get :: Get SmallInt)
|
||||
_ -> do sign <- get
|
||||
bytes <- get
|
||||
let v = roll bytes
|
||||
return $! if sign == (1 :: Word8) then v else - v
|
||||
|
||||
--
|
||||
-- Fold and unfold an Integer to and from a list of its bytes
|
||||
--
|
||||
unroll :: Integer -> [Word8]
|
||||
unroll = unfoldr step
|
||||
where
|
||||
step 0 = Nothing
|
||||
step i = Just (fromIntegral i, i `shiftR` 8)
|
||||
|
||||
roll :: [Word8] -> Integer
|
||||
roll = foldr unstep 0
|
||||
where
|
||||
unstep b a = a `shiftL` 8 .|. fromIntegral b
|
||||
|
||||
instance (Binary a,Integral a) => Binary (R.Ratio a) where
|
||||
put r = put (R.numerator r) >> put (R.denominator r)
|
||||
get = liftM2 (R.%) get get
|
||||
-}
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- Instances for the first few tuples
|
||||
|
||||
instance (Binary a, Binary b) => Binary (a,b) where
|
||||
put (a,b) = put a >> put b
|
||||
get = liftM2 (,) get get
|
||||
|
||||
instance (Binary a, Binary b, Binary c) => Binary (a,b,c) where
|
||||
put (a,b,c) = put a >> put b >> put c
|
||||
get = liftM3 (,,) get get get
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d) => Binary (a,b,c,d) where
|
||||
put (a,b,c,d) = put a >> put b >> put c >> put d
|
||||
get = liftM4 (,,,) get get get get
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d, Binary e) => Binary (a,b,c,d,e) where
|
||||
put (a,b,c,d,e) = put a >> put b >> put c >> put d >> put e
|
||||
get = liftM5 (,,,,) get get get get get
|
||||
|
||||
--
|
||||
-- and now just recurse:
|
||||
--
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f)
|
||||
=> Binary (a,b,c,d,e,f) where
|
||||
put (a,b,c,d,e,f) = put (a,(b,c,d,e,f))
|
||||
get = do (a,(b,c,d,e,f)) <- get ; return (a,b,c,d,e,f)
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d, Binary e, Binary f, Binary g)
|
||||
=> Binary (a,b,c,d,e,f,g) where
|
||||
put (a,b,c,d,e,f,g) = put (a,(b,c,d,e,f,g))
|
||||
get = do (a,(b,c,d,e,f,g)) <- get ; return (a,b,c,d,e,f,g)
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d, Binary e,
|
||||
Binary f, Binary g, Binary h)
|
||||
=> Binary (a,b,c,d,e,f,g,h) where
|
||||
put (a,b,c,d,e,f,g,h) = put (a,(b,c,d,e,f,g,h))
|
||||
get = do (a,(b,c,d,e,f,g,h)) <- get ; return (a,b,c,d,e,f,g,h)
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d, Binary e,
|
||||
Binary f, Binary g, Binary h, Binary i)
|
||||
=> Binary (a,b,c,d,e,f,g,h,i) where
|
||||
put (a,b,c,d,e,f,g,h,i) = put (a,(b,c,d,e,f,g,h,i))
|
||||
get = do (a,(b,c,d,e,f,g,h,i)) <- get ; return (a,b,c,d,e,f,g,h,i)
|
||||
|
||||
instance (Binary a, Binary b, Binary c, Binary d, Binary e,
|
||||
Binary f, Binary g, Binary h, Binary i, Binary j)
|
||||
=> Binary (a,b,c,d,e,f,g,h,i,j) where
|
||||
put (a,b,c,d,e,f,g,h,i,j) = put (a,(b,c,d,e,f,g,h,i,j))
|
||||
get = do (a,(b,c,d,e,f,g,h,i,j)) <- get ; return (a,b,c,d,e,f,g,h,i,j)
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- Container types
|
||||
|
||||
instance Binary a => Binary [a] where
|
||||
put l = put (length l) >> mapM_ put l
|
||||
get = do n <- get :: Get Int
|
||||
xs <- replicateM n get
|
||||
return xs
|
||||
|
||||
instance (Binary a) => Binary (Maybe a) where
|
||||
put Nothing = putWord8 0
|
||||
put (Just x) = putWord8 1 >> put x
|
||||
get = do
|
||||
w <- getWord8
|
||||
case w of
|
||||
0 -> return Nothing
|
||||
_ -> liftM Just get
|
||||
|
||||
instance (Binary a, Binary b) => Binary (Either a b) where
|
||||
put (Left a) = putWord8 0 >> put a
|
||||
put (Right b) = putWord8 1 >> put b
|
||||
get = do
|
||||
w <- getWord8
|
||||
case w of
|
||||
0 -> liftM Left get
|
||||
_ -> liftM Right get
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- ByteStrings (have specially efficient instances)
|
||||
|
||||
instance Binary B.ByteString where
|
||||
put bs = do put (B.length bs)
|
||||
putByteString bs
|
||||
get = get >>= getByteString
|
||||
|
||||
--
|
||||
-- Using old versions of fps, this is a type synonym, and non portable
|
||||
--
|
||||
-- Requires 'flexible instances'
|
||||
--
|
||||
{-
|
||||
instance Binary ByteString where
|
||||
put bs = do put (fromIntegral (L.length bs) :: Int)
|
||||
putLazyByteString bs
|
||||
get = get >>= getLazyByteString
|
||||
-}
|
||||
------------------------------------------------------------------------
|
||||
-- Maps and Sets
|
||||
|
||||
instance (Ord a, Binary a) => Binary (Set.Set a) where
|
||||
put s = put (Set.size s) >> mapM_ put (Set.toAscList s)
|
||||
get = liftM Set.fromDistinctAscList get
|
||||
|
||||
instance (Ord k, Binary k, Binary e) => Binary (Map.Map k e) where
|
||||
put m = put (Map.size m) >> mapM_ put (Map.toAscList m)
|
||||
get = liftM Map.fromDistinctAscList get
|
||||
|
||||
instance Binary IntSet.IntSet where
|
||||
put s = put (IntSet.size s) >> mapM_ put (IntSet.toAscList s)
|
||||
get = liftM IntSet.fromDistinctAscList get
|
||||
|
||||
instance (Binary e) => Binary (IntMap.IntMap e) where
|
||||
put m = put (IntMap.size m) >> mapM_ put (IntMap.toAscList m)
|
||||
get = liftM IntMap.fromDistinctAscList get
|
||||
|
||||
------------------------------------------------------------------------
|
||||
-- Floating point
|
||||
|
||||
-- instance Binary Double where
|
||||
-- put d = put (decodeFloat d)
|
||||
-- get = liftM2 encodeFloat get get
|
||||
|
||||
instance Binary Double where
|
||||
put = putFloat64be
|
||||
get = getFloat64be
|
||||
{-
|
||||
instance Binary Float where
|
||||
put f = put (decodeFloat f)
|
||||
get = liftM2 encodeFloat get get
|
||||
-}
|
||||
------------------------------------------------------------------------
|
||||
-- Trees
|
||||
{-
|
||||
instance (Binary e) => Binary (T.Tree e) where
|
||||
put (T.Node r s) = put r >> put s
|
||||
get = liftM2 T.Node get get
|
||||
-}
|
||||
------------------------------------------------------------------------
|
||||
-- Arrays
|
||||
|
||||
instance (Binary i, Ix i, Binary e) => Binary (Array i e) where
|
||||
put a = do
|
||||
put (bounds a)
|
||||
put (rangeSize $ bounds a) -- write the length
|
||||
mapM_ put (elems a) -- now the elems.
|
||||
get = do
|
||||
bs <- get
|
||||
n <- get -- read the length
|
||||
xs <- replicateM n get -- now the elems.
|
||||
return (listArray bs xs)
|
||||
|
||||
--
|
||||
-- The IArray UArray e constraint is non portable. Requires flexible instances
|
||||
--
|
||||
instance (Binary i, Ix i, Binary e, IArray UArray e) => Binary (UArray i e) where
|
||||
put a = do
|
||||
put (bounds a)
|
||||
put (rangeSize $ bounds a) -- now write the length
|
||||
mapM_ put (elems a)
|
||||
get = do
|
||||
bs <- get
|
||||
n <- get
|
||||
xs <- replicateM n get
|
||||
return (listArray bs xs)
|
||||
27
src/pgf-binary/pgf-binary.cabal
Normal file
@@ -0,0 +1,27 @@
|
||||
name: pgf-binary
|
||||
version: 0.5
|
||||
|
||||
cabal-version: >= 1.10
|
||||
build-type: Simple
|
||||
license: BSD3
|
||||
--license-file: LICENSE
|
||||
synopsis: Custom version of the binary-0.5 package for the PGF library
|
||||
homepage: http://www.grammaticalframework.org/
|
||||
--bug-reports: http://code.google.com/p/grammatical-framework/issues/list
|
||||
maintainer: Thomas Hallgren
|
||||
stability: provisional
|
||||
category: Data, Parsing
|
||||
tested-with: GHC==7.4.2, GHC==7.8.3
|
||||
|
||||
source-repository head
|
||||
type: darcs
|
||||
location: http://www.grammaticalframework.org/
|
||||
|
||||
Library
|
||||
default-language: Haskell2010
|
||||
build-depends: base >= 4.3 && <5, binary, data-binary-ieee754,
|
||||
containers, array, bytestring
|
||||
exposed-modules: PGF.Data.Binary
|
||||
|
||||
ghc-options: -fwarn-unused-imports -O2
|
||||
extensions: FlexibleInstances, FlexibleContexts
|
||||
@@ -76,9 +76,27 @@ typedef GuSeq PgfEquations;
|
||||
|
||||
typedef void *PgfFunction;
|
||||
|
||||
typedef enum {
|
||||
PGF_DEP_PRAGMA_HEAD,
|
||||
PGF_DEP_PRAGMA_MOD,
|
||||
PGF_DEP_PRAGMA_REL,
|
||||
PGF_DEP_PRAGMA_SKIP,
|
||||
PGF_DEP_PRAGMA_ANCH,
|
||||
PGF_DEP_PRAGMA_TAGS
|
||||
} PgfDepPragmaTag;
|
||||
|
||||
typedef struct {
|
||||
PgfDepPragmaTag tag;
|
||||
size_t index;
|
||||
GuString label;
|
||||
} PgfDepPragma;
|
||||
|
||||
typedef GuSeq PgfDepPragmas;
|
||||
|
||||
typedef struct {
|
||||
PgfCId name;
|
||||
PgfType* type;
|
||||
PgfDepPragmas* pragmas;
|
||||
int arity;
|
||||
PgfEquations* defns; // maybe null
|
||||
PgfExprProb ep;
|
||||
@@ -119,7 +137,6 @@ typedef struct {
|
||||
PgfFlags* aflags;
|
||||
PgfAbsFuns* funs;
|
||||
PgfAbsCats* cats;
|
||||
PgfAbsFun* abs_lin_fun;
|
||||
PgfEvalGates* eval_gates;
|
||||
} PgfAbstr;
|
||||
|
||||
@@ -262,8 +279,8 @@ typedef struct {
|
||||
typedef GuSeq PgfSequences;
|
||||
|
||||
typedef struct {
|
||||
PgfAbsFun* absfun;
|
||||
PgfExprProb *ep;
|
||||
GuSeq* absfuns;
|
||||
prob_t prob;
|
||||
int funid;
|
||||
size_t n_lins;
|
||||
PgfSequence* lins[];
|
||||
|
||||
@@ -413,3 +413,304 @@ pgf_graphviz_word_alignment(PgfConcr** concrs, size_t n_concrs, PgfExpr expr, Pg
|
||||
|
||||
gu_pool_free(tmp_pool);
|
||||
}
|
||||
|
||||
typedef struct {
|
||||
PgfPGF* pgf;
|
||||
int next_fid;
|
||||
GuBuf* anchors;
|
||||
GuBuf* heads;
|
||||
GuPool* pool;
|
||||
} PgfDepGenState;
|
||||
|
||||
typedef struct {
|
||||
int fid;
|
||||
int visit;
|
||||
PgfExpr expr;
|
||||
GuBuf* edges;
|
||||
} PgfDepNode;
|
||||
|
||||
typedef struct {
|
||||
GuString label;
|
||||
PgfDepNode* node;
|
||||
} PgfDepEdge;
|
||||
|
||||
typedef struct {
|
||||
bool solved;
|
||||
size_t start;
|
||||
size_t end;
|
||||
GuString label;
|
||||
} PgfDepStackRange;
|
||||
|
||||
static void
|
||||
pgf_graphviz_dependency_graph_(PgfDepGenState* state,
|
||||
size_t parents_start,size_t parents_end,
|
||||
GuString head_label, GuString mod_label,
|
||||
PgfExpr expr);
|
||||
|
||||
static bool
|
||||
pgf_graphviz_dependency_graph_apply(PgfDepGenState* state,
|
||||
size_t parents_start,size_t parents_end,
|
||||
GuString head_label, GuString mod_label,
|
||||
GuBuf* args, GuSeq* pragmas)
|
||||
{
|
||||
size_t n_args = gu_buf_length(args);
|
||||
size_t n_pragmas = pragmas ? gu_seq_length(pragmas) : 0;
|
||||
|
||||
size_t n_count = (n_args <= n_pragmas) ? n_args : n_pragmas;
|
||||
PgfDepStackRange ranges[n_count+1];
|
||||
for (size_t i = 0; i <= n_count; i++) {
|
||||
ranges[i].solved = false;
|
||||
ranges[i].label =
|
||||
(i > 0) ? gu_seq_index(pragmas, PgfDepPragma, i-1)->label
|
||||
: NULL;
|
||||
}
|
||||
|
||||
ranges[0].start = gu_buf_length(state->heads);
|
||||
ranges[0].end = gu_buf_length(state->heads);
|
||||
|
||||
bool rel_solved = false;
|
||||
size_t n_solved = 0;
|
||||
size_t count = 0;
|
||||
while (n_solved < n_count) {
|
||||
if (!ranges[0].solved) {
|
||||
ranges[0].start = gu_buf_length(state->heads);
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < n_count; i++) {
|
||||
if (ranges[i+1].solved)
|
||||
continue;
|
||||
|
||||
PgfExpr arg = gu_buf_get(args, PgfExpr, n_args-i-1);
|
||||
PgfDepPragma* pragma = gu_seq_index(pragmas, PgfDepPragma, i);
|
||||
|
||||
switch (pragma->tag) {
|
||||
case PGF_DEP_PRAGMA_MOD:
|
||||
assert(pragma->index <= n_count);
|
||||
if (ranges[0].solved && ranges[pragma->index].solved) {
|
||||
ranges[i+1].start = gu_buf_length(state->heads);
|
||||
pgf_graphviz_dependency_graph_(state,
|
||||
ranges[pragma->index].start, ranges[pragma->index].end,
|
||||
NULL, ranges[i+1].label,
|
||||
arg);
|
||||
ranges[i+1].end = gu_buf_length(state->heads);
|
||||
ranges[i+1].solved= true;
|
||||
n_solved++;
|
||||
}
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_REL:
|
||||
ranges[i+1].solved = true;
|
||||
ranges[i+1].start = 0;
|
||||
ranges[i+1].end = 0;
|
||||
n_solved++;
|
||||
|
||||
GuPool *tmp_pool = gu_local_pool();
|
||||
|
||||
GuStringBuf* sbuf =
|
||||
gu_new_string_buf(tmp_pool);
|
||||
GuOut* out = gu_string_buf_out(sbuf);
|
||||
GuExn* err = gu_new_exn(tmp_pool);
|
||||
|
||||
pgf_print_expr(arg, NULL, 0, out, err);
|
||||
|
||||
ranges[pragma->index].label =
|
||||
gu_string_buf_freeze(sbuf, state->pool);
|
||||
|
||||
gu_pool_free(tmp_pool);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_SKIP:
|
||||
ranges[i+1].solved = true;
|
||||
n_solved++;
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_ANCH:
|
||||
if (ranges[0].solved) {
|
||||
ranges[i+1].start = gu_buf_length(state->heads);
|
||||
pgf_graphviz_dependency_graph_(state,0,0,"ROOT","ROOT",arg);
|
||||
ranges[i+1].end = gu_buf_length(state->heads);
|
||||
ranges[i+1].solved= true;
|
||||
n_solved++;
|
||||
count++;
|
||||
}
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_HEAD:
|
||||
if (!rel_solved)
|
||||
break;
|
||||
|
||||
if (!ranges[0].solved) {
|
||||
GuString new_head_label = head_label;
|
||||
GuString new_mod_label = mod_label;
|
||||
if (pragma->label != NULL && *pragma->label && pragma->index == 0) {
|
||||
new_head_label = pragma->label;
|
||||
new_mod_label = "ROOT";
|
||||
}
|
||||
if (ranges[0].label != NULL)
|
||||
new_mod_label = ranges[0].label;
|
||||
ranges[i+1].start = gu_buf_length(state->heads);
|
||||
pgf_graphviz_dependency_graph_(state,
|
||||
parents_start,parents_end,
|
||||
new_head_label, new_mod_label,
|
||||
arg);
|
||||
ranges[i+1].end = gu_buf_length(state->heads);
|
||||
if (pragma->index == 0) {
|
||||
ranges[i+1].solved = true;
|
||||
n_solved++;
|
||||
}
|
||||
count++;
|
||||
}
|
||||
if (pragma->index != 0 && ranges[pragma->index].solved) {
|
||||
for (size_t j = ranges[pragma->index].start; j < ranges[pragma->index].end; j++) {
|
||||
PgfDepNode* parent = gu_buf_get(state->heads, PgfDepNode*, j);
|
||||
for (size_t k = ranges[i+1].start; k < ranges[i+1].end; k++) {
|
||||
PgfDepNode* child = gu_buf_get(state->heads, PgfDepNode*, k);
|
||||
PgfDepEdge* edge = gu_buf_extend(parent->edges);
|
||||
edge->label = pragma->label;
|
||||
edge->node = child;
|
||||
}
|
||||
}
|
||||
ranges[i+1].solved = true;
|
||||
n_solved++;
|
||||
}
|
||||
break;
|
||||
default:
|
||||
gu_impossible();
|
||||
}
|
||||
}
|
||||
|
||||
if (rel_solved) {
|
||||
if (!ranges[0].solved) {
|
||||
ranges[0].end = gu_buf_length(state->heads);
|
||||
ranges[0].solved = true;
|
||||
}
|
||||
} else {
|
||||
rel_solved = true;
|
||||
}
|
||||
}
|
||||
|
||||
gu_buf_trim_n(state->heads, gu_buf_length(state->heads)-ranges[0].end);
|
||||
|
||||
return (count > 0);
|
||||
}
|
||||
|
||||
static void
|
||||
pgf_graphviz_dependency_graph_(PgfDepGenState* state,
|
||||
size_t parents_start,size_t parents_end,
|
||||
GuString head_label, GuString mod_label,
|
||||
PgfExpr expr)
|
||||
{
|
||||
PgfExpr e = expr;
|
||||
GuBuf* args = gu_new_buf(PgfDepNode*, state->pool);
|
||||
|
||||
for (;;) {
|
||||
GuVariantInfo ei = gu_variant_open(e);
|
||||
switch (ei.tag) {
|
||||
case PGF_EXPR_APP: {
|
||||
PgfExprApp* app = ei.data;
|
||||
gu_buf_push(args, PgfExpr, app->arg);
|
||||
e = app->fun;
|
||||
break;
|
||||
}
|
||||
case PGF_EXPR_TYPED: {
|
||||
PgfExprTyped* typed = ei.data;
|
||||
e = typed->expr;
|
||||
break;
|
||||
}
|
||||
case PGF_EXPR_IMPL_ARG: {
|
||||
PgfExprImplArg* implarg = ei.data;
|
||||
e = implarg->expr;
|
||||
break;
|
||||
}
|
||||
case PGF_EXPR_FUN: {
|
||||
PgfExprFun* fun = ei.data;
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_binsearch(state->pgf->abstract.funs, pgf_absfun_order, PgfAbsFun, fun->fun);
|
||||
|
||||
if (pgf_graphviz_dependency_graph_apply(state,
|
||||
parents_start,parents_end,
|
||||
head_label,mod_label,
|
||||
args,absfun ? absfun->pragmas : NULL))
|
||||
return;
|
||||
// continue to default
|
||||
}
|
||||
default: {
|
||||
PgfDepNode* node = gu_new(PgfDepNode, state->pool);
|
||||
node->fid = state->next_fid++;
|
||||
node->visit = 0;
|
||||
node->expr = expr;
|
||||
node->edges = gu_new_buf(PgfDepEdge, state->pool);
|
||||
|
||||
for (size_t i = parents_start; i < parents_end; i++) {
|
||||
PgfDepNode* parent = gu_buf_get(state->heads, PgfDepNode*, i);
|
||||
if (head_label == NULL) {
|
||||
PgfDepEdge* edge = gu_buf_extend(parent->edges);
|
||||
edge->label = mod_label;
|
||||
edge->node = node;
|
||||
} else {
|
||||
PgfDepEdge* edge = gu_buf_extend(node->edges);
|
||||
edge->label = head_label;
|
||||
edge->node = parent;
|
||||
}
|
||||
}
|
||||
|
||||
gu_buf_push(state->heads, PgfDepNode*, node);
|
||||
if (head_label != NULL)
|
||||
gu_buf_push(state->anchors, PgfDepNode*, node);
|
||||
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
pgf_graphviz_print_graph(PgfGraphvizOptions* opts, PgfDepNode* node,
|
||||
GuOut* out, GuExn* err)
|
||||
{
|
||||
if (node->visit++ > 0)
|
||||
return;
|
||||
|
||||
gu_printf(out, err, " n%d[label = \"", node->fid);
|
||||
pgf_print_expr(node->expr, NULL, 0, out, err);
|
||||
if (opts->nodeColor != NULL && *opts->nodeColor)
|
||||
gu_printf(out, err, ", fontcolor = \"%s\"", opts->nodeColor);
|
||||
if (opts->nodeFont != NULL && *opts->nodeFont)
|
||||
gu_printf(out, err, ", fontname = \"%s\"", opts->nodeFont);
|
||||
gu_puts("\"]\n", out, err);
|
||||
|
||||
size_t n_children = gu_buf_length(node->edges);
|
||||
for (size_t i = 0; i < n_children; i++) {
|
||||
PgfDepEdge* edge = gu_buf_index(node->edges, PgfDepEdge, n_children-i-1);
|
||||
gu_printf(out, err, " n%d -> n%d [label = \"%s\"",
|
||||
node->fid, edge->node->fid, edge->label);
|
||||
if (opts->nodeEdgeStyle != NULL && *opts->nodeEdgeStyle)
|
||||
gu_printf(out, err, ", style = \"%s\"", opts->nodeEdgeStyle);
|
||||
if (opts->nodeColor != NULL && *opts->nodeColor)
|
||||
gu_printf(out, err, ", color = \"%s\"", opts->nodeColor);
|
||||
gu_puts("]\n", out, err);
|
||||
|
||||
if (edge->node->fid > node->fid)
|
||||
pgf_graphviz_print_graph(opts, edge->node, out, err);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
pgf_graphviz_dependency_graph(PgfPGF* pgf, PgfExpr expr,
|
||||
PgfGraphvizOptions* opts,
|
||||
GuOut* out, GuExn* err,
|
||||
GuPool* pool)
|
||||
{
|
||||
PgfDepGenState state;
|
||||
state.pgf = pgf;
|
||||
state.next_fid = 1;
|
||||
state.pool = pool;
|
||||
state.anchors = gu_new_buf(PgfDepNode*, pool);
|
||||
state.heads = gu_new_buf(PgfDepNode*, pool);
|
||||
|
||||
pgf_graphviz_dependency_graph_(&state, 0, 0, "ROOT", "ROOT", expr);
|
||||
|
||||
gu_puts("digraph {\n", out, err);
|
||||
size_t n_anchors = gu_buf_length(state.anchors);
|
||||
for (size_t i = 0; i < n_anchors; i++) {
|
||||
PgfDepNode* node = gu_buf_get(state.anchors, PgfDepNode*, i);
|
||||
pgf_graphviz_print_graph(opts,node,out,err);
|
||||
}
|
||||
gu_puts("}", out, err);
|
||||
}
|
||||
|
||||
@@ -40,15 +40,23 @@ pgf_lzr_index(PgfConcr* concr,
|
||||
switch (gu_variant_tag(prod)) {
|
||||
case PGF_PRODUCTION_APPLY: {
|
||||
PgfProductionApply* papply = data;
|
||||
PgfCncOverloadMap* overl_table =
|
||||
gu_map_get(concr->fun_indices, papply->fun->absfun->name,
|
||||
PgfCncOverloadMap*);
|
||||
if (!overl_table) {
|
||||
overl_table = gu_new_addr_map(PgfCCat*, GuBuf*, &gu_null_struct, pool);
|
||||
gu_map_put(concr->fun_indices,
|
||||
papply->fun->absfun->name, PgfCncOverloadMap*, overl_table);
|
||||
|
||||
size_t n_absfuns = gu_seq_length(papply->fun->absfuns);
|
||||
for (size_t i = 0; i < n_absfuns; i++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(papply->fun->absfuns, PgfAbsFun*, i);
|
||||
|
||||
PgfCncOverloadMap* overl_table =
|
||||
gu_map_get(concr->fun_indices, absfun->name,
|
||||
PgfCncOverloadMap*);
|
||||
if (!overl_table) {
|
||||
overl_table = gu_new_addr_map(PgfCCat*, GuBuf*, &gu_null_struct, pool);
|
||||
gu_map_put(concr->fun_indices,
|
||||
absfun->name,
|
||||
PgfCncOverloadMap*, overl_table);
|
||||
}
|
||||
pgf_lzr_add_overl_entry(overl_table, ccat, papply, pool);
|
||||
}
|
||||
pgf_lzr_add_overl_entry(overl_table, ccat, papply, pool);
|
||||
break;
|
||||
}
|
||||
case PGF_PRODUCTION_COERCE: {
|
||||
@@ -148,7 +156,7 @@ pgf_cnc_resolve(PgfCnc* cnc,
|
||||
static PgfCncTree
|
||||
pgf_cnc_resolve_app(PgfCnc* cnc,
|
||||
size_t n_vars, PgfPrintContext* context,
|
||||
PgfCCat* ccat, GuBuf* buf, GuBuf* args,
|
||||
PgfCCat* ccat, PgfCId abs_id, GuBuf* buf, GuBuf* args,
|
||||
GuPool* pool)
|
||||
{
|
||||
GuChoiceMark mark = gu_choice_mark(cnc->ch);
|
||||
@@ -164,6 +172,7 @@ pgf_cnc_resolve_app(PgfCnc* cnc,
|
||||
capp->ccat = ccat;
|
||||
capp->n_vars = n_vars;
|
||||
capp->context = context;
|
||||
capp->abs_id = abs_id;
|
||||
|
||||
redo:;
|
||||
int index = gu_choice_next(cnc->ch, gu_buf_length(buf));
|
||||
@@ -175,7 +184,6 @@ redo:;
|
||||
gu_buf_get(buf, PgfProductionApply*, index);
|
||||
gu_assert(n_args == gu_seq_length(papply->args));
|
||||
|
||||
capp->abs_id = papply->fun->absfun->name;
|
||||
capp->fun = papply->fun;
|
||||
capp->fid = 0;
|
||||
capp->n_args = n_args;
|
||||
@@ -470,7 +478,7 @@ redo:;
|
||||
gu_map_iter(overl_table, &clo.fn, NULL);
|
||||
assert(clo.ccat != NULL && clo.buf != NULL);
|
||||
|
||||
ret = pgf_cnc_resolve_app(cnc, n_vars, context, clo.ccat, clo.buf, args, pool);
|
||||
ret = pgf_cnc_resolve_app(cnc, n_vars, context, clo.ccat, efun->fun, clo.buf, args, pool);
|
||||
if (gu_variant_is_null(ret)) {
|
||||
gu_choice_reset(cnc->ch, mark);
|
||||
if (gu_choice_advance(cnc->ch))
|
||||
@@ -483,7 +491,7 @@ redo:;
|
||||
goto done;
|
||||
}
|
||||
|
||||
ret = pgf_cnc_resolve_app(cnc, n_vars, context, ccat, buf, args, pool);
|
||||
ret = pgf_cnc_resolve_app(cnc, n_vars, context, ccat, efun->fun, buf, args, pool);
|
||||
}
|
||||
goto done;
|
||||
}
|
||||
|
||||
@@ -803,7 +803,12 @@ pgf_lookup_ctree_to_expr(PgfCncTree ctree, PgfExprProb* ep,
|
||||
switch (cti.tag) {
|
||||
case PGF_CNC_TREE_APP: {
|
||||
PgfCncTreeApp* fapp = cti.data;
|
||||
*ep = fapp->fun->absfun->ep;
|
||||
if (gu_seq_length(fapp->fun->absfuns) > 0)
|
||||
*ep = gu_seq_get(fapp->fun->absfuns, PgfAbsFun*, 0)->ep;
|
||||
else {
|
||||
ep->expr = gu_null_variant;
|
||||
ep->prob = fapp->fun->prob;
|
||||
}
|
||||
n_args = fapp->n_args;
|
||||
args = fapp->args;
|
||||
break;
|
||||
@@ -923,8 +928,15 @@ pgf_lookup_sentence(PgfConcr* concr, PgfType* typ, GuString sentence, GuPool* po
|
||||
size_t n_cncfuns = gu_seq_length(concr->cncfuns);
|
||||
for (size_t i = 0; i < n_cncfuns; i++) {
|
||||
PgfCncFun* cncfun = gu_seq_get(concr->cncfuns, PgfCncFun*, i);
|
||||
for (size_t lin_idx = 0; lin_idx < cncfun->n_lins; lin_idx++) {
|
||||
pgf_lookup_index_syms(lexicon_idx, cncfun->lins[lin_idx]->syms, cncfun->absfun, pool);
|
||||
|
||||
size_t n_absfuns = gu_seq_length(cncfun->absfuns);
|
||||
for (size_t j = 0; j < n_absfuns; j++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(cncfun->absfuns, PgfAbsFun*, j);
|
||||
|
||||
for (size_t lin_idx = 0; lin_idx < cncfun->n_lins; lin_idx++) {
|
||||
pgf_lookup_index_syms(lexicon_idx, cncfun->lins[lin_idx]->syms, absfun, pool);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -710,8 +710,8 @@ pgf_new_item(PgfParsing* ps, PgfItemConts* conts, PgfProduction prod)
|
||||
case PGF_PRODUCTION_APPLY: {
|
||||
PgfProductionApply* papp = pi.data;
|
||||
item->args = papp->args;
|
||||
item->inside_prob = papp->fun->ep->prob;
|
||||
|
||||
item->inside_prob = papp->fun->prob;
|
||||
|
||||
int n_args = gu_seq_length(item->args);
|
||||
for (int i = 0; i < n_args; i++) {
|
||||
PgfPArg *arg = gu_seq_index(item->args, PgfPArg, i);
|
||||
@@ -1265,8 +1265,12 @@ pgf_parsing_add_transition(PgfParsing* ps, PgfToken tok, PgfItem* item)
|
||||
ps->tp = gu_new(PgfTokenProb, ps->out_pool);
|
||||
ps->tp->tok = tok;
|
||||
ps->tp->cat = item->conts->ccat->cnccat->abscat->name;
|
||||
ps->tp->fun = papp->fun->absfun->name;
|
||||
ps->tp->prob = item->inside_prob + item->conts->outside_prob;
|
||||
ps->tp->fun = "_";
|
||||
|
||||
if (gu_seq_length(papp->fun->absfuns) > 0)
|
||||
ps->tp->fun =
|
||||
gu_seq_get(papp->fun->absfuns, PgfAbsFun*, 0)->name;
|
||||
}
|
||||
} else {
|
||||
if (!ps->before->needs_bind && cmp_string(¤t, tok, ps->case_sensitive) == 0) {
|
||||
@@ -1794,19 +1798,25 @@ pgf_result_production(PgfParsing* ps,
|
||||
case PGF_PRODUCTION_APPLY: {
|
||||
PgfProductionApply* papp = pi.data;
|
||||
|
||||
PgfExprState *st = gu_new(PgfExprState, ps->pool);
|
||||
st->answers = answers;
|
||||
st->ep = *papp->fun->ep;
|
||||
st->args = papp->args;
|
||||
st->arg_idx = 0;
|
||||
size_t n_absfuns = gu_seq_length(papp->fun->absfuns);
|
||||
for (size_t i = 0; i < n_absfuns; i++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(papp->fun->absfuns, PgfAbsFun*, i);
|
||||
|
||||
size_t n_args = gu_seq_length(st->args);
|
||||
for (size_t k = 0; k < n_args; k++) {
|
||||
PgfPArg* parg = gu_seq_index(st->args, PgfPArg, k);
|
||||
st->ep.prob += parg->ccat->viterbi_prob;
|
||||
PgfExprState *st = gu_new(PgfExprState, ps->pool);
|
||||
st->answers = answers;
|
||||
st->ep = absfun->ep;
|
||||
st->args = papp->args;
|
||||
st->arg_idx = 0;
|
||||
|
||||
size_t n_args = gu_seq_length(st->args);
|
||||
for (size_t k = 0; k < n_args; k++) {
|
||||
PgfPArg* parg = gu_seq_index(st->args, PgfPArg, k);
|
||||
st->ep.prob += parg->ccat->viterbi_prob;
|
||||
}
|
||||
|
||||
gu_buf_heap_push(ps->expr_queue, &pgf_expr_state_order, &st);
|
||||
}
|
||||
|
||||
gu_buf_heap_push(ps->expr_queue, &pgf_expr_state_order, &st);
|
||||
break;
|
||||
}
|
||||
case PGF_PRODUCTION_COERCE: {
|
||||
@@ -2355,15 +2365,20 @@ pgf_morpho_iter(PgfProductionIdx* idx,
|
||||
PgfProductionIdxEntry* entry =
|
||||
gu_buf_index(idx, PgfProductionIdxEntry, i);
|
||||
|
||||
PgfCId lemma = entry->papp->fun->absfun->name;
|
||||
GuString analysis = entry->ccat->cnccat->labels[entry->lin_idx];
|
||||
|
||||
prob_t prob = entry->ccat->cnccat->abscat->prob +
|
||||
entry->papp->fun->absfun->ep.prob;
|
||||
callback->callback(callback,
|
||||
lemma, analysis, prob, err);
|
||||
if (!gu_ok(err))
|
||||
return;
|
||||
size_t n_absfuns = gu_seq_length(entry->papp->fun->absfuns);
|
||||
for (size_t j = 0; j < n_absfuns; j++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(entry->papp->fun->absfuns, PgfAbsFun*, j);
|
||||
PgfCId lemma = absfun->name;
|
||||
GuString analysis = entry->ccat->cnccat->labels[entry->lin_idx];
|
||||
|
||||
prob_t prob = entry->ccat->cnccat->abscat->prob +
|
||||
absfun->ep.prob;
|
||||
callback->callback(callback,
|
||||
lemma, analysis, prob, err);
|
||||
if (!gu_ok(err))
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2569,7 +2584,7 @@ pgf_ccat_set_viterbi_prob(PgfCCat* ccat) {
|
||||
return INFINITY;
|
||||
|
||||
prob_t viterbi_prob = INFINITY;
|
||||
|
||||
|
||||
size_t n_prods = gu_seq_length(ccat->prods);
|
||||
for (size_t i = 0; i < n_prods; i++) {
|
||||
PgfProduction prod =
|
||||
@@ -2581,7 +2596,7 @@ pgf_ccat_set_viterbi_prob(PgfCCat* ccat) {
|
||||
switch (inf.tag) {
|
||||
case PGF_PRODUCTION_APPLY: {
|
||||
PgfProductionApply* papp = inf.data;
|
||||
prob = papp->fun->ep->prob;
|
||||
prob = papp->fun->prob;
|
||||
|
||||
size_t n_args = gu_seq_length(papp->args);
|
||||
for (size_t j = 0; j < n_args; j++) {
|
||||
|
||||
@@ -60,7 +60,44 @@ pgf_print_absfuns(PgfAbsFuns* absfuns, GuOut *out, GuExn* err)
|
||||
pgf_print_cid(absfun->name, out, err);
|
||||
gu_puts(" : ", out, err);
|
||||
pgf_print_type(absfun->type, NULL, 0, out, err);
|
||||
gu_printf(out, err, " ; -- %f\n", absfun->ep.prob);
|
||||
gu_printf(out, err, " ; -- %f ", absfun->ep.prob);
|
||||
|
||||
size_t n_pragmas = gu_seq_length(absfun->pragmas);
|
||||
for (size_t i = 0; i < n_pragmas; i++) {
|
||||
PgfDepPragma* pragma =
|
||||
gu_seq_index(absfun->pragmas, PgfDepPragma, i);
|
||||
switch (pragma->tag) {
|
||||
case PGF_DEP_PRAGMA_HEAD:
|
||||
gu_puts("head",out,err);
|
||||
if (pragma->index > 0)
|
||||
gu_printf(out,err,":%d", pragma->index);
|
||||
if (pragma->label != NULL && *pragma->label != 0)
|
||||
gu_printf(out,err,":%s", pragma->label);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_MOD:
|
||||
gu_puts(pragma->label, out,err);
|
||||
if (pragma->index > 0)
|
||||
gu_printf(out,err,":%d", pragma->index);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_REL:
|
||||
gu_puts("rel",out,err);
|
||||
if (pragma->index > 0)
|
||||
gu_printf(out,err,":%d", pragma->index);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_SKIP:
|
||||
gu_puts("_",out,err);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_ANCH:
|
||||
gu_puts("anchor",out,err);
|
||||
break;
|
||||
default:
|
||||
gu_impossible();
|
||||
}
|
||||
|
||||
gu_putc(' ', out, err);
|
||||
}
|
||||
|
||||
gu_putc('\n', out, err);
|
||||
}
|
||||
}
|
||||
static void
|
||||
@@ -206,15 +243,17 @@ pgf_print_cncfun(PgfCncFun *cncfun, PgfSequences* sequences,
|
||||
gu_printf(out,err,"S%d", (seq - ((PgfSequence*) gu_seq_data(sequences))));
|
||||
}
|
||||
|
||||
gu_puts(")", out, err);
|
||||
|
||||
if (cncfun->absfun != NULL) {
|
||||
gu_puts(" [", out, err);
|
||||
pgf_print_cid(cncfun->absfun->name, out, err);
|
||||
gu_puts("]", out, err);
|
||||
gu_puts(") [", out, err);
|
||||
|
||||
size_t n_absfuns = gu_seq_length(cncfun->absfuns);
|
||||
for (size_t i = 0; i < n_absfuns; i++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(cncfun->absfuns, PgfAbsFun*, i);
|
||||
|
||||
pgf_print_cid(absfun->name, out, err);
|
||||
}
|
||||
|
||||
gu_puts("\n", out, err);
|
||||
|
||||
gu_puts("]\n", out, err);
|
||||
}
|
||||
|
||||
static void
|
||||
|
||||
@@ -407,6 +407,45 @@ pgf_read_patt(PgfReader* rdr)
|
||||
return patt;
|
||||
}
|
||||
|
||||
static PgfDepPragmas*
|
||||
pgf_read_deppragmas(PgfReader* rdr)
|
||||
{
|
||||
size_t n_pragmas = pgf_read_len(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
GuSeq* pragmas = gu_new_seq(PgfDepPragma, n_pragmas, rdr->opool);
|
||||
for (size_t i = 0; i < n_pragmas; i++) {
|
||||
PgfDepPragma* pragma = gu_seq_index(pragmas, PgfDepPragma, i);
|
||||
pragma->tag = pgf_read_tag(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
switch (pragma->tag) {
|
||||
case PGF_DEP_PRAGMA_HEAD:
|
||||
pragma->index = pgf_read_int(rdr);
|
||||
pragma->label = pgf_read_string(rdr);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_MOD:
|
||||
pragma->index = pgf_read_int(rdr);
|
||||
pragma->label = pgf_read_string(rdr);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_REL:
|
||||
pragma->index = pgf_read_int(rdr);
|
||||
pragma->label = NULL;
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_SKIP:
|
||||
pragma->index = 0;
|
||||
pragma->label = NULL;
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_ANCH:
|
||||
pragma->index = 0;
|
||||
pragma->label = NULL;
|
||||
break;
|
||||
default:
|
||||
pgf_read_tag_error(rdr);
|
||||
}
|
||||
}
|
||||
return pragmas;
|
||||
}
|
||||
|
||||
static PgfAbsFun*
|
||||
pgf_read_absfun(PgfReader* rdr, PgfAbstr* abstr, PgfAbsFun* absfun)
|
||||
{
|
||||
@@ -426,6 +465,9 @@ pgf_read_absfun(PgfReader* rdr, PgfAbstr* abstr, PgfAbsFun* absfun)
|
||||
absfun->type = pgf_read_type_(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
absfun->pragmas = pgf_read_deppragmas(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
absfun->arity = pgf_read_int(rdr);
|
||||
|
||||
uint8_t tag = pgf_read_tag(rdr);
|
||||
@@ -549,17 +591,6 @@ pgf_read_abstract(PgfReader* rdr, PgfAbstr* abstract)
|
||||
|
||||
abstract->cats = pgf_read_abscats(rdr, abstract);
|
||||
gu_return_on_exn(rdr->err, );
|
||||
|
||||
abstract->abs_lin_fun = gu_new(PgfAbsFun, rdr->opool);
|
||||
abstract->abs_lin_fun->name = "_";
|
||||
abstract->abs_lin_fun->type = gu_new(PgfType, rdr->opool);
|
||||
abstract->abs_lin_fun->type->hypos = NULL;
|
||||
abstract->abs_lin_fun->type->cid = "_";
|
||||
abstract->abs_lin_fun->type->n_exprs = 0;
|
||||
abstract->abs_lin_fun->arity = 0;
|
||||
abstract->abs_lin_fun->defns = NULL;
|
||||
abstract->abs_lin_fun->ep.prob = INFINITY;
|
||||
abstract->abs_lin_fun->ep.expr = gu_null_variant;
|
||||
}
|
||||
|
||||
static PgfCIdMap*
|
||||
@@ -776,22 +807,38 @@ pgf_read_sequences(PgfReader* rdr)
|
||||
static PgfCncFun*
|
||||
pgf_read_cncfun(PgfReader* rdr, PgfAbstr* abstr, PgfConcr* concr, int funid)
|
||||
{
|
||||
PgfCId name = pgf_read_cid(rdr, rdr->tmp_pool);
|
||||
size_t n_absfuns = pgf_read_len(rdr);
|
||||
GuSeq* absfuns =
|
||||
gu_new_seq(PgfAbsFun*, n_absfuns, rdr->opool);
|
||||
prob_t prob;
|
||||
if (n_absfuns == 0)
|
||||
prob = 0;
|
||||
else {
|
||||
prob = INFINITY;
|
||||
for (size_t i = 0; i < n_absfuns; i++) {
|
||||
PgfCId name = pgf_read_cid(rdr, rdr->tmp_pool);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_binsearch(abstr->funs, pgf_absfun_order, PgfAbsFun, name);
|
||||
|
||||
if (prob > absfun->ep.prob)
|
||||
prob = absfun->ep.prob;
|
||||
|
||||
gu_seq_set(absfuns, PgfAbsFun*, i, absfun);
|
||||
}
|
||||
}
|
||||
|
||||
size_t n_lins = pgf_read_len(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
size_t len = pgf_read_len(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_binsearch(abstr->funs, pgf_absfun_order, PgfAbsFun, name);
|
||||
|
||||
PgfCncFun* cncfun = gu_new_flex(rdr->opool, PgfCncFun, lins, len);
|
||||
cncfun->absfun = absfun;
|
||||
cncfun->ep = (absfun == NULL) ? NULL : &absfun->ep;
|
||||
PgfCncFun* cncfun = gu_new_flex(rdr->opool, PgfCncFun, lins, n_lins);
|
||||
cncfun->absfuns = absfuns;
|
||||
cncfun->prob = prob;
|
||||
cncfun->funid = funid;
|
||||
cncfun->n_lins = len;
|
||||
cncfun->n_lins = n_lins;
|
||||
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
for (size_t i = 0; i < n_lins; i++) {
|
||||
size_t seqid = pgf_read_int(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
@@ -878,7 +925,6 @@ pgf_read_lindefs(PgfReader* rdr, PgfConcr* concr)
|
||||
ccat->lindefs = gu_new_seq(PgfCncFun*, n_funs, rdr->opool);
|
||||
for (size_t j = 0; j < n_funs; j++) {
|
||||
PgfCncFun* fun = pgf_read_funid(rdr, concr);
|
||||
fun->absfun = concr->abstr->abs_lin_fun;
|
||||
gu_seq_set(ccat->lindefs, PgfCncFun*, j, fun);
|
||||
}
|
||||
}
|
||||
@@ -899,7 +945,6 @@ pgf_read_linrefs(PgfReader* rdr, PgfConcr* concr)
|
||||
ccat->linrefs = gu_new_seq(PgfCncFun*, n_funs, rdr->opool);
|
||||
for (size_t j = 0; j < n_funs; j++) {
|
||||
PgfCncFun* fun = pgf_read_funid(rdr, concr);
|
||||
fun->absfun = concr->abstr->abs_lin_fun;
|
||||
gu_seq_set(ccat->linrefs, PgfCncFun*, j, fun);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -311,6 +311,32 @@ pgf_write_absfun(PgfAbsFun* absfun, PgfWriter* wtr)
|
||||
|
||||
pgf_write_type_(absfun->type, wtr);
|
||||
gu_return_on_exn(wtr->err, );
|
||||
|
||||
size_t n_pragmas = gu_seq_length(absfun->pragmas);
|
||||
for (size_t i = 0; i < n_pragmas; i++) {
|
||||
PgfDepPragma* pragma =
|
||||
gu_seq_index(absfun->pragmas, PgfDepPragma, i);
|
||||
|
||||
pgf_write_tag(pragma->tag, wtr);
|
||||
switch (pragma->tag) {
|
||||
case PGF_DEP_PRAGMA_HEAD:
|
||||
pgf_write_int(pragma->index, wtr);
|
||||
pgf_write_string(pragma->label, wtr);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_MOD:
|
||||
pgf_write_int(pragma->index, wtr);
|
||||
pgf_write_string(pragma->label, wtr);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_REL:
|
||||
pgf_write_int(pragma->index, wtr);
|
||||
break;
|
||||
case PGF_DEP_PRAGMA_SKIP:
|
||||
case PGF_DEP_PRAGMA_ANCH:
|
||||
break;
|
||||
default:
|
||||
gu_impossible();
|
||||
}
|
||||
}
|
||||
|
||||
pgf_write_int(absfun->arity, wtr);
|
||||
|
||||
@@ -579,8 +605,15 @@ pgf_write_sequences(PgfSequences* seqs, PgfWriter* wtr)
|
||||
static void
|
||||
pgf_write_cncfun(PgfCncFun* cncfun, PgfConcr* concr, PgfWriter* wtr)
|
||||
{
|
||||
pgf_write_cid(cncfun->absfun->name, wtr);
|
||||
gu_return_on_exn(wtr->err, );
|
||||
size_t n_absfuns = gu_seq_length(cncfun->absfuns);
|
||||
pgf_write_len(n_absfuns, wtr);
|
||||
for (size_t i = 0; i < n_absfuns; i++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(cncfun->absfuns, PgfAbsFun*, i);
|
||||
|
||||
pgf_write_cid(absfun->name, wtr);
|
||||
gu_return_on_exn(wtr->err, );
|
||||
}
|
||||
|
||||
pgf_write_len(cncfun->n_lins, wtr);
|
||||
gu_return_on_exn(wtr->err, );
|
||||
|
||||
@@ -1305,20 +1305,26 @@ sg_update_fts_index(SgSG* sg, PgfPGF* pgf, GuExn* err)
|
||||
for (size_t funid = 0; funid < n_funs; funid++) {
|
||||
PgfCncFun* cncfun = gu_seq_get(concr->cncfuns, PgfCncFun*, funid);
|
||||
|
||||
SgId key = 0;
|
||||
rc = find_function_rowid(sg, &ctxt, cncfun->absfun->name, &key, 1);
|
||||
if (rc != SQLITE_OK) {
|
||||
sg_raise_sqlite(rc, err);
|
||||
goto close;
|
||||
}
|
||||
size_t n_absfuns = gu_seq_length(cncfun->absfuns);
|
||||
for (size_t i = 0; i < n_absfuns; i++) {
|
||||
PgfAbsFun* absfun =
|
||||
gu_seq_get(cncfun->absfuns, PgfAbsFun*, i);
|
||||
|
||||
for (size_t lin_idx = 0; lin_idx < cncfun->n_lins; lin_idx++) {
|
||||
PgfSequence* seq = cncfun->lins[lin_idx];
|
||||
rc = insert_syms(sg, crsTokens, seq->syms, key);
|
||||
SgId key = 0;
|
||||
rc = find_function_rowid(sg, &ctxt, absfun->name, &key, 1);
|
||||
if (rc != SQLITE_OK) {
|
||||
sg_raise_sqlite(rc, err);
|
||||
goto close;
|
||||
}
|
||||
|
||||
for (size_t lin_idx = 0; lin_idx < cncfun->n_lins; lin_idx++) {
|
||||
PgfSequence* seq = cncfun->lins[lin_idx];
|
||||
rc = insert_syms(sg, crsTokens, seq->syms, key);
|
||||
if (rc != SQLITE_OK) {
|
||||
sg_raise_sqlite(rc, err);
|
||||
goto close;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,7 +73,7 @@ module PGF2 (-- * PGF
|
||||
MorphoAnalysis, lookupMorpho, fullFormLexicon,
|
||||
-- ** Visualizations
|
||||
GraphvizOptions(..), graphvizDefaults,
|
||||
graphvizAbstractTree, graphvizParseTree, graphvizWordAlignment,
|
||||
graphvizAbstractTree, graphvizParseTree, graphvizDependencyGraph, graphvizWordAlignment,
|
||||
|
||||
-- * Exceptions
|
||||
PGFError(..),
|
||||
@@ -140,14 +140,13 @@ readPGF fpath =
|
||||
|
||||
showPGF :: PGF -> String
|
||||
showPGF p =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print (pgf p) out exn
|
||||
touchPGF p
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print (pgf p) out exn
|
||||
touchPGF p
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
-- | List of all languages available in the grammar.
|
||||
languages :: PGF -> Map.Map ConcName Concr
|
||||
@@ -411,41 +410,48 @@ graphvizDefaults = GraphvizOptions False False False True "" "" "" "" "" ""
|
||||
-- | Renders an abstract syntax tree in a Graphviz format.
|
||||
graphvizAbstractTree :: PGF -> GraphvizOptions -> Expr -> String
|
||||
graphvizAbstractTree p opts e =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
c_opts <- newGraphvizOptions tmpPl opts
|
||||
pgf_graphviz_abstract_tree (pgf p) (expr e) c_opts out exn
|
||||
touchExpr e
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
c_opts <- newGraphvizOptions tmpPl opts
|
||||
pgf_graphviz_abstract_tree (pgf p) (expr e) c_opts out exn
|
||||
touchExpr e
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
graphvizParseTree :: Concr -> GraphvizOptions -> Expr -> String
|
||||
graphvizParseTree c opts e =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
c_opts <- newGraphvizOptions tmpPl opts
|
||||
pgf_graphviz_parse_tree (concr c) (expr e) c_opts out exn
|
||||
touchExpr e
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
c_opts <- newGraphvizOptions tmpPl opts
|
||||
pgf_graphviz_parse_tree (concr c) (expr e) c_opts out exn
|
||||
touchExpr e
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
graphvizDependencyGraph :: PGF -> GraphvizOptions -> Expr -> String
|
||||
graphvizDependencyGraph p opts e =
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
c_opts <- newGraphvizOptions tmpPl opts
|
||||
pgf_graphviz_dependency_graph (pgf p) (expr e) c_opts out exn tmpPl
|
||||
touchExpr e
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
graphvizWordAlignment :: [Concr] -> GraphvizOptions -> Expr -> String
|
||||
graphvizWordAlignment cs opts e =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
withArrayLen (map concr cs) $ \n_concrs ptr ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
do tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
c_opts <- newGraphvizOptions tmpPl opts
|
||||
pgf_graphviz_word_alignment ptr (fromIntegral n_concrs) (expr e) c_opts out exn
|
||||
touchExpr e
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
newGraphvizOptions :: Ptr GuPool -> GraphvizOptions -> IO (Ptr PgfGraphvizOptions)
|
||||
newGraphvizOptions pool opts = do
|
||||
@@ -750,8 +756,7 @@ linearize lang e = unsafePerformIO $
|
||||
msg <- peekUtf8CString c_msg
|
||||
throwIO (PGFError msg)
|
||||
else throwIO (PGFError "The abstract tree cannot be linearized")
|
||||
else do lin <- gu_string_buf_freeze sb pl
|
||||
peekUtf8CString lin
|
||||
else do peekUtf8CStringBuf sb
|
||||
|
||||
-- | Generates all possible linearizations of an expression
|
||||
linearizeAll :: Concr -> Expr -> [String]
|
||||
@@ -780,8 +785,7 @@ linearizeAll lang e = unsafePerformIO $
|
||||
if is_nonexist
|
||||
then collect cts exn pl
|
||||
else throwExn exn pl
|
||||
else do lin <- gu_string_buf_freeze sb tmpPl
|
||||
s <- peekUtf8CString lin
|
||||
else do s <- peekUtf8CStringBuf sb
|
||||
ss <- collect cts exn pl
|
||||
return (s:ss)
|
||||
|
||||
@@ -841,8 +845,7 @@ tabularLinearizeAll lang e = unsafePerformIO $
|
||||
if is_nonexist
|
||||
then collectTable lang ctree (lin_idx+1) labels exn tmpPl
|
||||
else throwExn exn
|
||||
else do lin <- gu_string_buf_freeze sb tmpPl
|
||||
s <- peekUtf8CString lin
|
||||
else do s <- peekUtf8CStringBuf sb
|
||||
ss <- collectTable lang ctree (lin_idx+1) labels exn tmpPl
|
||||
return ((label,s):ss)
|
||||
|
||||
|
||||
@@ -252,15 +252,14 @@ foreign import ccall "wrapper"
|
||||
-- of binding.
|
||||
showExpr :: [CId] -> Expr -> String
|
||||
showExpr scope e =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
printCtxt <- newPrintCtxt scope tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print_expr (expr e) printCtxt 1 out exn
|
||||
touchExpr e
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
printCtxt <- newPrintCtxt scope tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print_expr (expr e) printCtxt 1 out exn
|
||||
touchExpr e
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
newPrintCtxt :: [String] -> Ptr GuPool -> IO (Ptr PgfPrintContext)
|
||||
newPrintCtxt [] pool = return nullPtr
|
||||
|
||||
@@ -15,6 +15,7 @@ import Control.Exception
|
||||
import GHC.Ptr
|
||||
import Data.Int
|
||||
import Data.Word
|
||||
import System.IO.Unsafe
|
||||
|
||||
type Touch = IO ()
|
||||
|
||||
@@ -106,6 +107,12 @@ foreign import ccall unsafe "gu/enum.h gu_enum_next"
|
||||
foreign import ccall unsafe "gu/string.h gu_string_buf_freeze"
|
||||
gu_string_buf_freeze :: Ptr GuStringBuf -> Ptr GuPool -> IO CString
|
||||
|
||||
foreign import ccall unsafe "gu/string.h gu_string_buf_data"
|
||||
gu_string_buf_data :: Ptr GuStringBuf -> IO CString
|
||||
|
||||
foreign import ccall unsafe "gu/string.h gu_string_buf_length"
|
||||
gu_string_buf_length :: Ptr GuStringBuf -> IO CSizeT
|
||||
|
||||
foreign import ccall unsafe "gu/utf8.h gu_utf8_decode"
|
||||
gu_utf8_decode :: Ptr CString -> IO GuUCS
|
||||
|
||||
@@ -186,6 +193,29 @@ peekUtf8CStringLen ptr len =
|
||||
cs <- decode pptr end
|
||||
return (((toEnum . fromEnum) x) : cs)
|
||||
|
||||
peekUtf8CStringBuf :: Ptr GuStringBuf -> IO String
|
||||
peekUtf8CStringBuf sbuf = do
|
||||
ptr <- gu_string_buf_data sbuf
|
||||
len <- gu_string_buf_length sbuf
|
||||
peekUtf8CStringLen ptr (fromIntegral len)
|
||||
|
||||
peekUtf8CStringBufResult :: Ptr GuStringBuf -> Ptr GuPool -> IO String
|
||||
peekUtf8CStringBufResult sbuf pool = do
|
||||
fptr <- newForeignPtr gu_pool_finalizer pool
|
||||
ptr <- gu_string_buf_data sbuf
|
||||
len <- gu_string_buf_length sbuf
|
||||
pptr <- gu_malloc pool (#size GuString*)
|
||||
poke pptr ptr >> decode fptr pptr (ptr `plusPtr` fromIntegral len)
|
||||
where
|
||||
decode fptr pptr end = do
|
||||
ptr <- peek pptr
|
||||
if ptr >= end
|
||||
then return []
|
||||
else do x <- gu_utf8_decode pptr
|
||||
cs <- unsafeInterleaveIO (decode fptr pptr end)
|
||||
touchForeignPtr fptr
|
||||
return (((toEnum . fromEnum) x) : cs)
|
||||
|
||||
pokeUtf8CString :: String -> CString -> IO ()
|
||||
pokeUtf8CString s ptr =
|
||||
alloca $ \pptr ->
|
||||
@@ -518,6 +548,9 @@ foreign import ccall "pgf/graphviz.h pgf_graphviz_abstract_tree"
|
||||
|
||||
foreign import ccall "pgf/graphviz.h pgf_graphviz_parse_tree"
|
||||
pgf_graphviz_parse_tree :: Ptr PgfConcr -> PgfExpr -> Ptr PgfGraphvizOptions -> Ptr GuOut -> Ptr GuExn -> IO ()
|
||||
|
||||
foreign import ccall "pgf/graphviz.h pgf_graphviz_dependency_graph"
|
||||
pgf_graphviz_dependency_graph :: Ptr PgfPGF -> PgfExpr -> Ptr PgfGraphvizOptions -> Ptr GuOut -> Ptr GuExn -> Ptr GuPool -> IO ()
|
||||
|
||||
foreign import ccall "pgf/graphviz.h pgf_graphviz_word_alignment"
|
||||
pgf_graphviz_word_alignment :: Ptr (Ptr PgfConcr) -> CSizeT -> PgfExpr -> Ptr PgfGraphvizOptions -> Ptr GuOut -> Ptr GuExn -> IO ()
|
||||
|
||||
@@ -12,9 +12,6 @@ module PGF2.Internal(-- * Access the internal structures
|
||||
build, eAbs, eApp, eMeta, eFun, eVar, eTyped, eImplArg, dTyp, hypo,
|
||||
AbstrInfo, newAbstr, ConcrInfo, newConcr, newPGF,
|
||||
|
||||
-- * Expose PGF and Concr for FFI with C
|
||||
PGF(..), Concr(..),
|
||||
|
||||
-- * Write an in-memory PGF to a file
|
||||
writePGF
|
||||
) where
|
||||
@@ -197,21 +194,24 @@ concrTotalFuns c = unsafePerformIO $ do
|
||||
touchConcr c
|
||||
return (fromIntegral (c_len :: CSizeT))
|
||||
|
||||
concrFunction :: Concr -> FunId -> (Fun,[SeqId])
|
||||
concrFunction :: Concr -> FunId -> ([Fun],[SeqId])
|
||||
concrFunction c funid = unsafePerformIO $ do
|
||||
c_cncfuns <- (#peek PgfConcr, cncfuns) (concr c)
|
||||
c_cncfun <- peek (c_cncfuns `plusPtr` ((#offset GuSeq, data)+funid*(#size PgfCncFun*)))
|
||||
c_absfun <- (#peek PgfCncFun, absfun) c_cncfun
|
||||
c_name <- (#peek PgfAbsFun, name) c_absfun
|
||||
name <- peekUtf8CString c_name
|
||||
c_absfuns <- (#peek PgfCncFun, absfuns) c_cncfun
|
||||
names <- peekSequence peekAbsName (#size PgfAbsFun*) c_absfuns
|
||||
c_n_lins <- (#peek PgfCncFun, n_lins) c_cncfun
|
||||
arr <- peekArray (fromIntegral (c_n_lins :: CSizeT)) (c_cncfun `plusPtr` (#offset PgfCncFun, lins))
|
||||
seqs_seq <- (#peek PgfConcr, sequences) (concr c)
|
||||
touchConcr c
|
||||
let seqs = seqs_seq `plusPtr` (#offset GuSeq, data)
|
||||
return (name, map (toSeqId seqs) arr)
|
||||
return (names, map (toSeqId seqs) arr)
|
||||
where
|
||||
toSeqId seqs seq = minusPtr seq seqs `div` (#size PgfSequence)
|
||||
|
||||
peekAbsName c_absfun = do
|
||||
c_name <- (#peek PgfAbsFun, name) c_absfun
|
||||
peekUtf8CString c_name
|
||||
|
||||
concrTotalSeqs :: Concr -> SeqId
|
||||
concrTotalSeqs c = unsafePerformIO $ do
|
||||
@@ -448,7 +448,7 @@ newHypos hypos pool = do
|
||||
Implicit -> (#const PGF_BIND_TYPE_IMPLICIT)
|
||||
|
||||
|
||||
data AbstrInfo = AbstrInfo (Ptr GuSeq) (Ptr GuSeq) (Map.Map String (Ptr PgfAbsCat)) (Ptr GuSeq) (Map.Map String (Ptr PgfAbsFun)) (Ptr PgfAbsFun) (Ptr GuBuf) Touch
|
||||
data AbstrInfo = AbstrInfo (Ptr GuSeq) (Ptr GuSeq) (Map.Map String (Ptr PgfAbsCat)) (Ptr GuSeq) (Map.Map String (Ptr PgfAbsFun)) (Ptr GuBuf) Touch
|
||||
|
||||
newAbstr :: (?builder :: Builder s) => [(String,Literal)] ->
|
||||
[(Cat,[B s Hypo],Float)] ->
|
||||
@@ -458,9 +458,8 @@ newAbstr aflags cats funs = unsafePerformIO $ do
|
||||
c_aflags <- newFlags aflags pool
|
||||
(c_cats,abscats) <- newAbsCats (sortByFst3 cats) pool
|
||||
(c_funs,absfuns) <- newAbsFuns (sortByFst4 funs) pool
|
||||
c_abs_lin_fun <- newAbsLinFun
|
||||
c_non_lexical_buf <- gu_make_buf (#size PgfProductionIdxEntry) pool
|
||||
return (AbstrInfo c_aflags c_cats abscats c_funs absfuns c_abs_lin_fun c_non_lexical_buf touch)
|
||||
return (AbstrInfo c_aflags c_cats abscats c_funs absfuns c_non_lexical_buf touch)
|
||||
where
|
||||
(Builder pool touch) = ?builder
|
||||
|
||||
@@ -506,26 +505,6 @@ newAbstr aflags cats funs = unsafePerformIO $ do
|
||||
(#poke PgfAbsFun, ep.prob) ptr (realToFrac prob :: CFloat)
|
||||
return (Map.insert name ptr absfuns)
|
||||
|
||||
newAbsLinFun = do
|
||||
ptr <- gu_malloc_aligned pool
|
||||
(#size PgfAbsFun)
|
||||
(#const gu_alignof(PgfAbsFun))
|
||||
c_wild <- newUtf8CString "_" pool
|
||||
c_ty <- gu_malloc_aligned pool
|
||||
(#size PgfType)
|
||||
(#const gu_alignof(PgfType))
|
||||
(#poke PgfType, hypos) c_ty nullPtr
|
||||
(#poke PgfType, cid) c_ty c_wild
|
||||
(#poke PgfType, n_exprs) c_ty (0 :: CSizeT)
|
||||
(#poke PgfAbsFun, name) ptr c_wild
|
||||
(#poke PgfAbsFun, type) ptr c_ty
|
||||
(#poke PgfAbsFun, arity) ptr (0 :: CSizeT)
|
||||
(#poke PgfAbsFun, defns) ptr nullPtr
|
||||
(#poke PgfAbsFun, ep.prob) ptr (- log 0 :: CFloat)
|
||||
(#poke PgfAbsFun, ep.expr) ptr nullPtr
|
||||
return ptr
|
||||
|
||||
|
||||
data ConcrInfo = ConcrInfo (Ptr GuSeq) (Ptr GuMap) (Ptr GuMap) (Ptr GuSeq) (Ptr GuSeq) (Ptr GuMap) (Ptr PgfConcr -> Ptr GuPool -> IO ()) CInt
|
||||
|
||||
newConcr :: (?builder :: Builder s) => AbstrInfo ->
|
||||
@@ -534,12 +513,12 @@ newConcr :: (?builder :: Builder s) => AbstrInfo ->
|
||||
[(FId,[FunId])] -> -- ^ Lindefs
|
||||
[(FId,[FunId])] -> -- ^ Linrefs
|
||||
[(FId,[Production])] -> -- ^ Productions
|
||||
[(Fun,[SeqId])] -> -- ^ Concrete functions (must be sorted by Fun)
|
||||
[([Fun],[SeqId])] -> -- ^ Concrete functions (must be sorted by Fun)
|
||||
[[Symbol]] -> -- ^ Sequences (must be sorted)
|
||||
[(Cat,FId,FId,[String])] -> -- ^ Concrete categories
|
||||
FId -> -- ^ The total count of the categories
|
||||
ConcrInfo
|
||||
newConcr (AbstrInfo _ _ abscats _ absfuns c_abs_lin_fun c_non_lexical_buf _) cflags printnames lindefs linrefs prods cncfuns sequences cnccats total_cats = unsafePerformIO $ do
|
||||
newConcr (AbstrInfo _ _ abscats _ absfuns c_non_lexical_buf _) cflags printnames lindefs linrefs prods cncfuns sequences cnccats total_cats = unsafePerformIO $ do
|
||||
c_cflags <- newFlags cflags pool
|
||||
c_printname <- newMap (#size GuString) gu_string_hasher newUtf8CString
|
||||
(#size GuString) (pokeString pool)
|
||||
@@ -600,7 +579,6 @@ newConcr (AbstrInfo _ _ abscats _ absfuns c_abs_lin_fun c_non_lexical_buf _) cf
|
||||
|
||||
pokeRefDefFunId funs_ptr ptr funid = do
|
||||
let c_fun = funs_ptr `plusPtr` (funid * (#size PgfCncFun))
|
||||
(#poke PgfCncFun, absfun) c_fun c_abs_lin_fun
|
||||
poke ptr c_fun
|
||||
|
||||
pokeCncCat c_ccats ptr (name,start,end,labels) = do
|
||||
@@ -632,7 +610,7 @@ newPGF :: (?builder :: Builder s) => [(String,Literal)] ->
|
||||
AbstrInfo ->
|
||||
[(ConcName,ConcrInfo)] ->
|
||||
B s PGF
|
||||
newPGF gflags absname (AbstrInfo c_aflags c_cats _ c_funs _ c_abs_lin_fun _ _) concrs =
|
||||
newPGF gflags absname (AbstrInfo c_aflags c_cats _ c_funs _ _ _) concrs =
|
||||
unsafePerformIO $ do
|
||||
ptr <- gu_malloc_aligned pool
|
||||
(#size PgfPGF)
|
||||
@@ -648,7 +626,6 @@ newPGF gflags absname (AbstrInfo c_aflags c_cats _ c_funs _ c_abs_lin_fun _ _) c
|
||||
(#poke PgfPGF, abstract.aflags) ptr c_aflags
|
||||
(#poke PgfPGF, abstract.funs) ptr c_funs
|
||||
(#poke PgfPGF, abstract.cats) ptr c_cats
|
||||
(#poke PgfPGF, abstract.abs_lin_fun) ptr c_abs_lin_fun
|
||||
(#poke PgfPGF, concretes) ptr c_concrs
|
||||
(#poke PgfPGF, pool) ptr pool
|
||||
return (B (PGF ptr touch))
|
||||
@@ -754,19 +731,18 @@ newProduction c_ccats funs_ptr c_non_lexical_buf (PCoerce fid) pool =
|
||||
return (0,c_prod)
|
||||
|
||||
|
||||
newCncFun absfuns seqs_ptr (funid,(fun,seqids)) pool =
|
||||
do let c_absfun = fromMaybe nullPtr (Map.lookup fun absfuns)
|
||||
c_ep = if c_absfun == nullPtr
|
||||
then nullPtr
|
||||
else c_absfun `plusPtr` (#offset PgfAbsFun, ep)
|
||||
n_lins = fromIntegral (length seqids) :: CSizeT
|
||||
newCncFun absfuns seqs_ptr (funid,(funs,seqids)) pool =
|
||||
do let absfun_ptrs = [ptr | fun <- funs, Just ptr <- [Map.lookup fun absfuns]]
|
||||
n_lins = fromIntegral (length seqids) :: CSizeT
|
||||
ptr <- gu_malloc_aligned pool
|
||||
((#size PgfCncFun)+n_lins*(#size PgfSequence*))
|
||||
(#const gu_flex_alignof(PgfCncFun))
|
||||
(#poke PgfCncFun, absfun) ptr c_absfun
|
||||
(#poke PgfCncFun, ep) ptr c_ep
|
||||
(#poke PgfCncFun, funid) ptr (funid :: CInt)
|
||||
(#poke PgfCncFun, n_lins) ptr n_lins
|
||||
c_absfuns <- newSequence (#size PgfAbsFun*) poke absfun_ptrs pool
|
||||
c_prob <- fmap (minimum . (0:)) $ mapM (#peek PgfAbsFun, ep.prob) absfun_ptrs
|
||||
(#poke PgfCncFun, absfuns) ptr c_absfuns
|
||||
(#poke PgfCncFun, prob) ptr (c_prob :: CFloat)
|
||||
(#poke PgfCncFun, funid) ptr (funid :: CInt)
|
||||
(#poke PgfCncFun, n_lins) ptr n_lins
|
||||
pokeSequences seqs_ptr (ptr `plusPtr` (#offset PgfCncFun, lins)) seqids
|
||||
return ptr
|
||||
where
|
||||
@@ -775,6 +751,7 @@ newCncFun absfuns seqs_ptr (funid,(fun,seqids)) pool =
|
||||
poke ptr (seqs_ptr `plusPtr` (seqid * (#size PgfSequence)))
|
||||
pokeSequences seqs_ptr (ptr `plusPtr` (#size PgfSequence*)) seqids
|
||||
|
||||
|
||||
getCCat c_ccats fid pool =
|
||||
alloca $ \pfid -> do
|
||||
poke pfid (fromIntegral fid :: CInt)
|
||||
|
||||
@@ -45,15 +45,14 @@ readType str =
|
||||
-- of binding.
|
||||
showType :: [CId] -> Type -> String
|
||||
showType scope (Type ty touch) =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
printCtxt <- newPrintCtxt scope tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print_type ty printCtxt 0 out exn
|
||||
touch
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
printCtxt <- newPrintCtxt scope tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print_type ty printCtxt 0 out exn
|
||||
touch
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
-- | creates a type from a list of hypothesises, a category and
|
||||
-- a list of arguments for the category. The operation
|
||||
@@ -129,13 +128,12 @@ unType (Type c_type touch) = unsafePerformIO $ do
|
||||
-- of binding.
|
||||
showContext :: [CId] -> [Hypo] -> String
|
||||
showContext scope hypos =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
do (sb,out) <- newOut tmpPl
|
||||
c_hypos <- newSequence (#size PgfHypo) (pokeHypo tmpPl) hypos tmpPl
|
||||
printCtxt <- newPrintCtxt scope tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print_context c_hypos printCtxt out exn
|
||||
mapM_ touchHypo hypos
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
unsafePerformIO $ do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
c_hypos <- newSequence (#size PgfHypo) (pokeHypo tmpPl) hypos tmpPl
|
||||
printCtxt <- newPrintCtxt scope tmpPl
|
||||
exn <- gu_new_exn tmpPl
|
||||
pgf_print_context c_hypos printCtxt out exn
|
||||
mapM_ touchHypo hypos
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
@@ -196,18 +196,17 @@ readTriple str =
|
||||
showTriple :: Expr -> Expr -> Expr -> String
|
||||
showTriple (Expr expr1 touch1) (Expr expr2 touch2) (Expr expr3 touch3) =
|
||||
unsafePerformIO $
|
||||
withGuPool $ \tmpPl ->
|
||||
withTriple $ \triple -> do
|
||||
(sb,out) <- newOut tmpPl
|
||||
let printCtxt = nullPtr
|
||||
exn <- gu_new_exn tmpPl
|
||||
pokeElemOff triple 0 expr1
|
||||
pokeElemOff triple 1 expr2
|
||||
pokeElemOff triple 2 expr3
|
||||
pgf_print_expr_tuple 3 triple printCtxt out exn
|
||||
touch1 >> touch2 >> touch3
|
||||
s <- gu_string_buf_freeze sb tmpPl
|
||||
peekUtf8CString s
|
||||
withTriple $ \triple -> do
|
||||
tmpPl <- gu_new_pool
|
||||
(sb,out) <- newOut tmpPl
|
||||
let printCtxt = nullPtr
|
||||
exn <- gu_new_exn tmpPl
|
||||
pokeElemOff triple 0 expr1
|
||||
pokeElemOff triple 1 expr2
|
||||
pokeElemOff triple 2 expr3
|
||||
pgf_print_expr_tuple 3 triple printCtxt out exn
|
||||
touch1 >> touch2 >> touch3
|
||||
peekUtf8CStringBufResult sb tmpPl
|
||||
|
||||
insertTriple :: SG -> Expr -> Expr -> Expr -> IO SgId
|
||||
insertTriple (SG sg) (Expr expr1 touch1) (Expr expr2 touch2) (Expr expr3 touch3) =
|
||||
|
||||
@@ -335,8 +335,8 @@ functionsByCat pgf cat =
|
||||
|
||||
functionType pgf fun =
|
||||
case Map.lookup fun (funs (abstract pgf)) of
|
||||
Just (ty,_,_,_) -> Just ty
|
||||
Nothing -> Nothing
|
||||
Just (ty,_,_,_,_) -> Just ty
|
||||
Nothing -> Nothing
|
||||
|
||||
-- | Converts an expression to normal form
|
||||
compute :: PGF -> Expr -> Expr
|
||||
@@ -363,20 +363,20 @@ browse :: PGF -> CId -> Maybe (String,[CId],[CId])
|
||||
browse pgf id = fmap (\def -> (def,producers,consumers)) definition
|
||||
where
|
||||
definition = case Map.lookup id (funs (abstract pgf)) of
|
||||
Just (ty,_,Just (eqs,_),_) -> Just $ render (text "fun" <+> ppCId id <+> colon <+> ppType 0 [] ty $$
|
||||
Just (ty,_,_,Just (eqs,_),_) -> Just $ render (text "fun" <+> ppCId id <+> colon <+> ppType 0 [] ty $$
|
||||
if null eqs
|
||||
then empty
|
||||
else text "def" <+> vcat [let scope = foldl pattScope [] patts
|
||||
ds = map (ppPatt 9 scope) patts
|
||||
in ppCId id <+> hsep ds <+> char '=' <+> ppExpr 0 scope res | Equ patts res <- eqs])
|
||||
Just (ty,_,Nothing,_) -> Just $ render (text "data" <+> ppCId id <+> colon <+> ppType 0 [] ty)
|
||||
Just (ty,_,_,Nothing,_) -> Just $ render (text "data" <+> ppCId id <+> colon <+> ppType 0 [] ty)
|
||||
Nothing -> case Map.lookup id (cats (abstract pgf)) of
|
||||
Just (hyps,_,_) -> Just $ render (text "cat" <+> ppCId id <+> hsep (snd (mapAccumL (ppHypo 4) [] hyps)))
|
||||
Nothing -> Nothing
|
||||
|
||||
(producers,consumers) = Map.foldWithKey accum ([],[]) (funs (abstract pgf))
|
||||
where
|
||||
accum f (ty,_,_,_) (plist,clist) =
|
||||
accum f (ty,_,_,_,_) (plist,clist) =
|
||||
let !plist' = if id `elem` ps then f : plist else plist
|
||||
!clist' = if id `elem` cs then f : clist else clist
|
||||
in (plist',clist')
|
||||
|
||||
@@ -47,13 +47,13 @@ instance Binary CId where
|
||||
|
||||
instance Binary Abstr where
|
||||
put abs = do put (aflags abs)
|
||||
put (Map.map (\(ty,arity,mb_eq,prob) -> (ty,arity,fmap fst mb_eq,prob)) (funs abs))
|
||||
put (Map.map (\(ty,ps,arity,mb_eq,prob) -> (ty,ps,arity,fmap fst mb_eq,prob)) (funs abs))
|
||||
put (cats abs)
|
||||
get = do aflags <- get
|
||||
funs <- get
|
||||
cats <- get
|
||||
return (Abstr{ aflags=aflags
|
||||
, funs=Map.map (\(ty,arity,mb_eq,prob) -> (ty,arity,fmap (\eq -> (eq,[])) mb_eq,prob)) funs
|
||||
, funs=Map.map (\(ty,ps,arity,mb_eq,prob) -> (ty,ps,arity,fmap (\eq -> (eq,[])) mb_eq,prob)) funs
|
||||
, cats=cats
|
||||
})
|
||||
|
||||
@@ -199,6 +199,26 @@ instance Binary BindType where
|
||||
1 -> return Implicit
|
||||
_ -> decodingError
|
||||
|
||||
instance Binary DepPragma where
|
||||
put (Head index lbl) = putWord8 0 >> put index >> put lbl
|
||||
put (Mod index lbl) = putWord8 1 >> put index >> put lbl
|
||||
put (Rel index) = putWord8 2 >> put index
|
||||
put Skip = putWord8 3
|
||||
put Anch = putWord8 4
|
||||
get = do
|
||||
tag <- getWord8
|
||||
case tag of
|
||||
0 -> do index <- get
|
||||
lbl <- get
|
||||
return (Head index lbl)
|
||||
1 -> do index <- get
|
||||
lbl <- get
|
||||
return (Mod index lbl)
|
||||
2 -> do index <- get
|
||||
return (Rel index)
|
||||
3 -> return Skip
|
||||
4 -> return Anch
|
||||
|
||||
instance Binary CncFun where
|
||||
put (CncFun fun lins) = put fun >> putArray lins
|
||||
get = liftM2 CncFun get getArray
|
||||
|
||||
@@ -28,7 +28,7 @@ data PGF = PGF {
|
||||
|
||||
data Abstr = Abstr {
|
||||
aflags :: Map.Map CId Literal, -- ^ value of a flag
|
||||
funs :: Map.Map CId (Type,Int,Maybe ([Equation],[[Instr]]),Double),-- ^ type, arrity and definition of function + probability
|
||||
funs :: Map.Map CId (Type,[DepPragma],Int,Maybe ([Equation],[[Instr]]),Double), -- ^ type, pragmas, arrity and definition of function + probability
|
||||
cats :: Map.Map CId ([Hypo],[(Double, CId)],Double) -- ^ 1. context of a category
|
||||
-- 2. functions of a category. The functions are stored
|
||||
-- in decreasing probability order.
|
||||
@@ -74,7 +74,7 @@ data Production
|
||||
deriving (Eq,Ord,Show)
|
||||
data PArg = PArg [(FId,FId)] {-# UNPACK #-} !FId deriving (Eq,Ord,Show)
|
||||
data CncCat = CncCat {-# UNPACK #-} !FId {-# UNPACK #-} !FId {-# UNPACK #-} !(Array LIndex String)
|
||||
data CncFun = CncFun CId {-# UNPACK #-} !(UArray LIndex SeqId) deriving (Eq,Ord,Show)
|
||||
data CncFun = CncFun [CId] {-# UNPACK #-} !(UArray LIndex SeqId) deriving (Eq,Ord,Show)
|
||||
type Sequence = Array DotPos Symbol
|
||||
type FunId = Int
|
||||
type SeqId = Int
|
||||
@@ -105,8 +105,8 @@ emptyPGF = PGF {
|
||||
haveSameFunsPGF :: PGF -> PGF -> Bool
|
||||
haveSameFunsPGF one two =
|
||||
let
|
||||
fsone = [(f,t) | (f,(t,_,_,_)) <- Map.toList (funs (abstract one))]
|
||||
fstwo = [(f,t) | (f,(t,_,_,_)) <- Map.toList (funs (abstract two))]
|
||||
fsone = [(f,t) | (f,(t,_,_,_,_)) <- Map.toList (funs (abstract one))]
|
||||
fstwo = [(f,t) | (f,(t,_,_,_,_)) <- Map.toList (funs (abstract two))]
|
||||
in fsone == fstwo
|
||||
|
||||
-- | This is just a 'CId' with the language name.
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
module PGF.Expr(Tree, BindType(..), Expr(..), Literal(..), Patt(..), Equation(..),
|
||||
module PGF.Expr(Tree, BindType(..), Expr(..), Literal(..), Patt(..), Equation(..), DepPragma(..),
|
||||
readExpr, showExpr, pExpr, pBinds, ppExpr, ppPatt, pattScope,
|
||||
|
||||
mkAbs, unAbs,
|
||||
@@ -77,6 +77,14 @@ data Equation =
|
||||
Equ [Patt] Expr
|
||||
deriving Show
|
||||
|
||||
data DepPragma
|
||||
= Head Int String
|
||||
| Mod Int String
|
||||
| Rel Int
|
||||
| Skip
|
||||
| Anch
|
||||
|
||||
|
||||
-- | parses 'String' as an expression
|
||||
readExpr :: String -> Maybe Expr
|
||||
readExpr s = case [x | (x,cs) <- RP.readP_to_S pExpr s, all isSpace cs] of
|
||||
@@ -319,15 +327,15 @@ data Value
|
||||
| VClosure Env Expr
|
||||
| VImplArg Value
|
||||
|
||||
type Sig = ( Map.Map CId (Type,Int,Maybe ([Equation],[[Instr]]),Double) -- type and def of a fun
|
||||
, Int -> Maybe Expr -- lookup for metavariables
|
||||
type Sig = ( Map.Map CId (Type,[DepPragma],Int,Maybe ([Equation],[[Instr]]),Double) -- type and def of a fun
|
||||
, Int -> Maybe Expr -- lookup for metavariables
|
||||
)
|
||||
type Env = [Value]
|
||||
|
||||
eval :: Sig -> Env -> Expr -> Value
|
||||
eval sig env (EVar i) = env !! i
|
||||
eval sig env (EFun f) = case Map.lookup f (fst sig) of
|
||||
Just (_,a,meqs,_) -> case meqs of
|
||||
Just (_,_,a,meqs,_) -> case meqs of
|
||||
Just (eqs,_)
|
||||
-> if a == 0
|
||||
then case eqs of
|
||||
@@ -349,12 +357,12 @@ apply :: Sig -> Env -> Expr -> [Value] -> Value
|
||||
apply sig env e [] = eval sig env e
|
||||
apply sig env (EVar i) vs = applyValue sig (env !! i) vs
|
||||
apply sig env (EFun f) vs = case Map.lookup f (fst sig) of
|
||||
Just (_,a,meqs,_) -> case meqs of
|
||||
Just (eqs,_) -> if a <= length vs
|
||||
then match sig f eqs vs
|
||||
else VApp f vs
|
||||
Nothing -> VApp f vs
|
||||
Nothing -> error ("unknown function "++showCId f)
|
||||
Just (_,_,a,meqs,_) -> case meqs of
|
||||
Just (eqs,_) -> if a <= length vs
|
||||
then match sig f eqs vs
|
||||
else VApp f vs
|
||||
Nothing -> VApp f vs
|
||||
Nothing -> error ("unknown function "++showCId f)
|
||||
apply sig env (EApp e1 e2) vs = apply sig env e1 (eval sig env e2 : vs)
|
||||
apply sig env (EAbs b x e) (v:vs) = case (b,v) of
|
||||
(Implicit,VImplArg v) -> apply sig (v:env) e vs
|
||||
|
||||
@@ -71,11 +71,11 @@ bracketedTokn dp f@(Forest abs cnc forest root) =
|
||||
in (ct,fid',fun,es,(map getVar hypos,lin))
|
||||
Nothing -> error ("wrong forest id " ++ show fid)
|
||||
where
|
||||
descend forest (PApply funid args) = let (CncFun fun _lins) = cncfuns cnc ! funid
|
||||
cat = case isLindefCId fun of
|
||||
Just cat -> cat
|
||||
Nothing -> case Map.lookup fun (funs abs) of
|
||||
Just (DTyp _ cat _,_,_,_) -> cat
|
||||
descend forest (PApply funid args) = let (CncFun pfuns _lins) = cncfuns cnc ! funid
|
||||
cat = case pfuns of
|
||||
[] -> wildCId
|
||||
(pfun:_) -> case Map.lookup pfun (funs abs) of
|
||||
Just (DTyp _ cat _,_,_,_,_) -> cat
|
||||
largs = map (render forest) args
|
||||
ltable = mkLinTable cnc isTrusted [] funid largs
|
||||
in ((cat,fid),0,wildCId,either (const []) id $ getAbsTrees f arg Nothing dp,ltable)
|
||||
@@ -103,14 +103,6 @@ bracketedTokn dp f@(Forest abs cnc forest root) =
|
||||
descend (PCoerce fid) = trustedSpots parents' (PArg [] fid)
|
||||
descend (PConst c e _) = IntSet.empty
|
||||
|
||||
isLindefCId id
|
||||
| take l s == lindef = Just (mkCId (drop l s))
|
||||
| otherwise = Nothing
|
||||
where
|
||||
s = showCId id
|
||||
lindef = "lindef "
|
||||
l = length lindef
|
||||
|
||||
-- | This function extracts the list of all completed parse trees
|
||||
-- that spans the whole input consumed so far. The trees are also
|
||||
-- limited by the category specified, which is usually
|
||||
@@ -132,13 +124,13 @@ getAbsTrees (Forest abs cnc forest root) arg@(PArg _ fid) ty dp =
|
||||
| otherwise = do fid0 <- get
|
||||
put fid
|
||||
x <- foldForest (\funid args trees ->
|
||||
do let CncFun fn _lins = cncfuns cnc ! funid
|
||||
case isLindefCId fn of
|
||||
Just _ -> do arg <- go (Set.insert fid rec_) scope mb_tty (head args)
|
||||
do let CncFun fns _lins = cncfuns cnc ! funid
|
||||
case fns of
|
||||
[] -> do arg <- go (Set.insert fid rec_) scope mb_tty (head args)
|
||||
return (mkAbs arg)
|
||||
Nothing -> do ty_fn <- lookupFunType fn
|
||||
fns -> do ty_fn <- lookupFunType (head fns)
|
||||
(e,tty0) <- foldM (\(e1,tty) arg -> goArg (Set.insert fid rec_) scope fid e1 arg tty)
|
||||
(EFun fn,TTyp [] ty_fn) args
|
||||
(EFun (head fns),TTyp [] ty_fn) args
|
||||
case mb_tty of
|
||||
Just tty -> do i <- newGuardedMeta e
|
||||
eqType scope (scopeSize scope) i tty tty0
|
||||
|
||||
@@ -109,7 +109,7 @@ linTree pgf cnc e = nub (map snd (lin Nothing 0 e [] [] e []))
|
||||
Nothing -> concat [toApp fid prod | (fid,set) <- IntMap.toList prods, prod <- Set.toList set]
|
||||
where
|
||||
toApp fid (PApply funid pargs) =
|
||||
let Just (ty,_,_,_) = Map.lookup f (funs (abstract pgf))
|
||||
let Just (ty,_,_,_,_) = Map.lookup f (funs (abstract pgf))
|
||||
(args,res) = catSkeleton ty
|
||||
in [(funid,(res,fid),zip args [fid | PArg _ fid <- pargs])]
|
||||
toApp _ (PCoerce fid) =
|
||||
|
||||
@@ -22,13 +22,13 @@ mapConcretes f pgf = pgf { concretes = Map.map f (concretes pgf) }
|
||||
lookType :: Abstr -> CId -> Type
|
||||
lookType abs f =
|
||||
case lookMap (error $ "lookType " ++ show f) f (funs abs) of
|
||||
(ty,_,_,_) -> ty
|
||||
(ty,_,_,_,_) -> ty
|
||||
|
||||
isData :: Abstr -> CId -> Bool
|
||||
isData abs f =
|
||||
case Map.lookup f (funs abs) of
|
||||
Just (_,_,Nothing,_) -> True -- the encoding of data constrs
|
||||
_ -> False
|
||||
Just (_,_,_,Nothing,_) -> True -- the encoding of data constrs
|
||||
_ -> False
|
||||
|
||||
lookValCat :: Abstr -> CId -> CId
|
||||
lookValCat abs = valCat . lookType abs
|
||||
@@ -61,7 +61,7 @@ lookConcrFlag pgf lang f = Map.lookup f $ cflags $ lookConcr pgf lang
|
||||
|
||||
functionsToCat :: PGF -> CId -> [(CId,Type)]
|
||||
functionsToCat pgf cat =
|
||||
[(f,ty) | (_,f) <- fs, Just (ty,_,_,_) <- [Map.lookup f $ funs $ abstract pgf]]
|
||||
[(f,ty) | (_,f) <- fs, Just (ty,_,_,_,_) <- [Map.lookup f $ funs $ abstract pgf]]
|
||||
where
|
||||
(_,fs,_) = lookMap ([],[],0) cat $ cats $ abstract pgf
|
||||
|
||||
|
||||
@@ -31,7 +31,8 @@ collectWords pinfo = Map.fromListWith (++)
|
||||
[(t, [(fun,lbls ! l)]) | (CncCat s e lbls) <- Map.elems (cnccats pinfo)
|
||||
, fid <- [s..e]
|
||||
, PApply funid _ <- maybe [] Set.toList (IntMap.lookup fid (productions pinfo))
|
||||
, let CncFun fun lins = cncfuns pinfo ! funid
|
||||
, let CncFun funs lins = cncfuns pinfo ! funid
|
||||
, fun <- funs
|
||||
, (l,seqid) <- assocs lins
|
||||
, sym <- elems (sequences pinfo ! seqid)
|
||||
, t <- sym2tokns sym]
|
||||
|
||||
@@ -39,7 +39,7 @@ getAbstract =
|
||||
funs <- getMap getCId getFun
|
||||
cats <- getMap getCId getCat
|
||||
return (Abstr{ aflags=aflags
|
||||
, funs=fmap (\(w,x,y,z) -> (w,x,fmap (flip (,) []) y,z)) funs
|
||||
, funs=fmap (\(w,x,y,z) -> (w,[],x,fmap (flip (,) []) y,z)) funs
|
||||
, cats=fmap (\(x,y) -> (x,y,0)) cats
|
||||
})
|
||||
getFun :: Get (Type,Int,Maybe [Equation],Double)
|
||||
@@ -60,7 +60,7 @@ getConcr =
|
||||
cnccats <- getMap getCId getCncCat
|
||||
totalCats <- get
|
||||
let rseq = listToArray [SymCat 0 0]
|
||||
rfun = CncFun (mkCId "linref") (listToArray [scnt])
|
||||
rfun = CncFun [mkCId "linref"] (listToArray [scnt])
|
||||
linrefs = IntMap.fromList [(i,[fcnt])|i<-[0..totalCats-1]]
|
||||
return (Concr{ cflags=cflags, printnames=printnames
|
||||
, sequences=toArray (scnt+1,seqs++[rseq])
|
||||
@@ -110,7 +110,7 @@ getBindType =
|
||||
1 -> return Implicit
|
||||
_ -> decodingError "getBindType"
|
||||
|
||||
getCncFun = liftM2 CncFun getCId (getArray get)
|
||||
getCncFun = liftM2 CncFun (fmap (:[]) getCId) (getArray get)
|
||||
|
||||
getCncCat = liftM3 CncCat get get (getArray get)
|
||||
|
||||
|
||||
@@ -253,7 +253,7 @@ updateConcrete abs cnc =
|
||||
, prod <- Set.toList prods
|
||||
, fun <- getFunctions prod]
|
||||
where
|
||||
getFunctions (PApply funid args) = let CncFun fun _ = cncfuns cnc ! funid in [fun]
|
||||
getFunctions (PApply funid args) = let CncFun funs _ = cncfuns cnc ! funid in funs
|
||||
getFunctions (PCoerce fid) = case IntMap.lookup fid productions of
|
||||
Nothing -> []
|
||||
Just prods -> [fun | prod <- Set.toList prods, fun <- getFunctions prod]
|
||||
|
||||
@@ -53,7 +53,7 @@ fromDef pgf t@(Fun f ts) = defDown t ++ defUp t where
|
||||
isClosed d || (length equs == 1 && isLinear d)]
|
||||
|
||||
equss = [(f,[(Fun f (map patt2tree ps), expr2tree d) | (Equ ps d) <- eqs]) |
|
||||
(f,(_,_,Just (eqs,_),_)) <- Map.assocs (funs (abstract pgf)), not (null eqs)]
|
||||
(f,(_,_,_,Just (eqs,_),_)) <- Map.assocs (funs (abstract pgf)), not (null eqs)]
|
||||
---- AR 14/12/2010: (expr2tree d) fails unless we send the variable list from ps in eqs;
|
||||
---- cf. PGF.Tree.expr2tree
|
||||
trequ s f e = True ----trace (s ++ ": " ++ show f ++ " " ++ show e) True
|
||||
|
||||
@@ -503,14 +503,14 @@ type Continuation = TrieMap.TrieMap Token ActiveSet
|
||||
-- | Return the Continuation of a Parsestate with exportable types
|
||||
-- Used by PGFService
|
||||
getContinuationInfo :: ParseState -> Map.Map [Token] [(FunId, CId, String)]
|
||||
getContinuationInfo pstate = Map.map (map f . Set.toList) contMap
|
||||
getContinuationInfo pstate = Map.map (concatMap f . Set.toList) contMap
|
||||
where
|
||||
PState _abstr concr _chart cont = pstate
|
||||
contMap = Map.fromList (TrieMap.toList cont) -- always get [([], _::ActiveSet)]
|
||||
f :: Active -> (FunId,CId,String)
|
||||
f (Active int dotpos funid seqid pargs ak) = (funid, cid, seq)
|
||||
f :: Active -> [(FunId,CId,String)]
|
||||
f (Active int dotpos funid seqid pargs ak) = [(funid, fn, seq) | fn <- fns]
|
||||
where
|
||||
CncFun cid _ = cncfuns concr ! funid
|
||||
CncFun fns _ = cncfuns concr ! funid
|
||||
seq = showSeq dotpos (sequences concr ! seqid)
|
||||
|
||||
showSeq :: DotPos -> Sequence -> String
|
||||
|
||||
@@ -31,15 +31,15 @@ ppFlag flag value = text "flag" <+> ppCId flag <+> char '=' <+> ppLit value <+>
|
||||
ppCat :: CId -> ([Hypo],[(Double,CId)],Double) -> Doc
|
||||
ppCat c (hyps,_,_) = text "cat" <+> ppCId c <+> hsep (snd (mapAccumL (ppHypo 4) [] hyps)) <+> char ';'
|
||||
|
||||
ppFun :: CId -> (Type,Int,Maybe ([Equation],[[Instr]]),Double) -> Doc
|
||||
ppFun f (t,_,Just (eqs,code),_) = text "fun" <+> ppCId f <+> colon <+> ppType 0 [] t <+> char ';' $$
|
||||
(if null eqs
|
||||
then empty
|
||||
else text "def" <+> vcat [let scope = foldl pattScope [] patts
|
||||
ds = map (ppPatt 9 scope) patts
|
||||
in ppCId f <+> hsep ds <+> char '=' <+> ppExpr 0 scope res <+> char ';' | Equ patts res <- eqs]) $$
|
||||
ppCode 0 code
|
||||
ppFun f (t,_,Nothing,_) = text "data" <+> ppCId f <+> colon <+> ppType 0 [] t <+> char ';'
|
||||
ppFun :: CId -> (Type,[DepPragma],Int,Maybe ([Equation],[[Instr]]),Double) -> Doc
|
||||
ppFun f (t,_,_,Just (eqs,code),_) = text "fun" <+> ppCId f <+> colon <+> ppType 0 [] t <+> char ';' $$
|
||||
(if null eqs
|
||||
then empty
|
||||
else text "def" <+> vcat [let scope = foldl pattScope [] patts
|
||||
ds = map (ppPatt 9 scope) patts
|
||||
in ppCId f <+> hsep ds <+> char '=' <+> ppExpr 0 scope res <+> char ';' | Equ patts res <- eqs]) $$
|
||||
ppCode 0 code
|
||||
ppFun f (t,_,_,Nothing,_) = text "data" <+> ppCId f <+> colon <+> ppType 0 [] t <+> char ';'
|
||||
|
||||
ppCnc :: Language -> Concr -> Doc
|
||||
ppCnc name cnc =
|
||||
@@ -73,8 +73,8 @@ ppProduction (fid,PCoerce arg) =
|
||||
ppProduction (fid,PConst _ _ ss) =
|
||||
ppFId fid <+> text "->" <+> ppStrs ss
|
||||
|
||||
ppCncFun (funid,CncFun fun arr) =
|
||||
ppFunId funid <+> text ":=" <+> parens (hcat (punctuate comma (map ppSeqId (elems arr)))) <+> brackets (ppCId fun)
|
||||
ppCncFun (funid,CncFun funs arr) =
|
||||
ppFunId funid <+> text ":=" <+> parens (hcat (punctuate comma (map ppSeqId (elems arr)))) <+> brackets (hsep (map ppCId funs))
|
||||
|
||||
ppLinDefs (fid,funids) =
|
||||
[ppFId fid <+> text "->" <+> ppFunId funid <> brackets (ppFId fidVar) | funid <- funids]
|
||||
|
||||
@@ -76,15 +76,15 @@ defaultProbabilities pgf = mkProbabilities pgf Map.empty
|
||||
|
||||
getProbabilities :: PGF -> Probabilities
|
||||
getProbabilities pgf = Probs {
|
||||
funProbs = Map.map (\(_,_,_,p) -> p ) (funs (abstract pgf)),
|
||||
catProbs = Map.map (\(_,fns,p) -> (p,fns)) (cats (abstract pgf))
|
||||
funProbs = Map.map (\(_,_,_,_,p) -> p ) (funs (abstract pgf)),
|
||||
catProbs = Map.map (\(_,fns,p) -> (p,fns)) (cats (abstract pgf))
|
||||
}
|
||||
|
||||
setProbabilities :: Probabilities -> PGF -> PGF
|
||||
setProbabilities probs pgf = pgf {
|
||||
abstract = (abstract pgf) {
|
||||
funs = mapUnionWith (\(ty,a,df,_) p -> (ty,a,df, p)) (funs (abstract pgf)) (funProbs probs),
|
||||
cats = mapUnionWith (\(hypos,_,_) (p,fns) -> (hypos,fns,p)) (cats (abstract pgf)) (catProbs probs)
|
||||
funs = mapUnionWith (\(ty,ps,a,df,_) p -> (ty,ps,a,df, p)) (funs (abstract pgf)) (funProbs probs),
|
||||
cats = mapUnionWith (\(hypos,_,_) (p,fns) -> (hypos,fns,p)) (cats (abstract pgf)) (catProbs probs)
|
||||
}}
|
||||
where
|
||||
mapUnionWith f map1 map2 =
|
||||
@@ -95,8 +95,8 @@ probTree :: PGF -> Expr -> Double
|
||||
probTree pgf t = case t of
|
||||
EApp f e -> probTree pgf f * probTree pgf e
|
||||
EFun f -> case Map.lookup f (funs (abstract pgf)) of
|
||||
Just (_,_,_,p) -> p
|
||||
Nothing -> 1
|
||||
Just (_,_,_,_,p) -> p
|
||||
Nothing -> 1
|
||||
_ -> 1
|
||||
|
||||
-- | rank from highest to lowest probability
|
||||
@@ -113,7 +113,7 @@ mkProbDefs pgf =
|
||||
hyps0
|
||||
[1..]
|
||||
fns = [(f,ty) | (_,f) <- fs,
|
||||
let Just (ty,_,_,_) = Map.lookup f (funs (abstract pgf))]
|
||||
let Just (ty,_,_,_,_) = Map.lookup f (funs (abstract pgf))]
|
||||
]
|
||||
((_,css),eqss) = mapAccumL (\(ngen,css) (c,hyps,fns) ->
|
||||
let st0 = (1,Map.empty)
|
||||
@@ -263,7 +263,7 @@ computeConstrs pgf st fns =
|
||||
where
|
||||
addArgs (cn,fns) = addArg (length args) cn [] fns
|
||||
where
|
||||
Just (DTyp args _ _es,_,_,_) = Map.lookup cn (funs (abstract pgf))
|
||||
Just (DTyp args _ _es,_,_,_,_) = Map.lookup cn (funs (abstract pgf))
|
||||
|
||||
addArg 0 cn ps fns = [(PApp cn (reverse ps),fns)]
|
||||
addArg n cn ps fns = concat [addArg (n-1) cn (arg:ps) fns' | (arg,fns') <- computeConstr fns]
|
||||
|
||||
@@ -38,7 +38,7 @@ showInOrder abs fset remset avset =
|
||||
isArg :: Abstr -> Map.Map CId CId -> Set.Set CId -> CId -> Maybe [CId]
|
||||
isArg abs mtypes scid cid =
|
||||
let p = Map.lookup cid $ funs abs
|
||||
(ty,_,_,_) = fromJust p
|
||||
(ty,_,_,_,_) = fromJust p
|
||||
args = arguments ty
|
||||
setargs = Set.fromList args
|
||||
cond = Set.null $ Set.difference setargs scid
|
||||
@@ -51,8 +51,8 @@ typesInterm :: Abstr -> Set.Set CId -> Map.Map CId CId
|
||||
typesInterm abs fset =
|
||||
let fs = funs abs
|
||||
fsetTypes = Set.map (\x ->
|
||||
let (DTyp _ c _,_,_,_)=fromJust $ Map.lookup x fs
|
||||
in (x,c)) fset
|
||||
let (DTyp _ c _,_,_,_,_)=fromJust $ Map.lookup x fs
|
||||
in (x,c)) fset
|
||||
in Map.fromList $ Set.toList fsetTypes
|
||||
|
||||
{-
|
||||
@@ -67,7 +67,7 @@ doesReturnCat (DTyp _ c _) cat = c == cat
|
||||
returnCat :: Abstr -> CId -> CId
|
||||
returnCat abs cid =
|
||||
let p = Map.lookup cid $ funs abs
|
||||
(DTyp _ c _,_,_,_) = fromJust p
|
||||
(DTyp _ c _,_,_,_,_) = fromJust p
|
||||
in if isNothing p then error $ "not found "++ show cid ++ " in abstract "
|
||||
else c
|
||||
|
||||
|
||||
@@ -135,8 +135,8 @@ lookupCatHyps cat = TcM (\abstr k h ms -> case Map.lookup cat (cats abstr) of
|
||||
|
||||
lookupFunType :: CId -> TcM s Type
|
||||
lookupFunType fun = TcM (\abstr k h ms -> case Map.lookup fun (funs abstr) of
|
||||
Just (ty,_,_,_) -> k ty ms
|
||||
Nothing -> h (UnknownFun fun))
|
||||
Just (ty,_,_,_,_) -> k ty ms
|
||||
Nothing -> h (UnknownFun fun))
|
||||
|
||||
typeGenerators :: Scope -> CId -> TcM s [(Double,Expr,TType)]
|
||||
typeGenerators scope cat = fmap normalize (liftM2 (++) x y)
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
name: pgf
|
||||
version: 3.10
|
||||
version: 3.9.1-git
|
||||
|
||||
cabal-version: >= 1.20
|
||||
build-type: Simple
|
||||
@@ -12,6 +12,11 @@ bug-reports: https://github.com/GrammaticalFramework/gf-core/issues
|
||||
maintainer: Thomas Hallgren
|
||||
tested-with: GHC==7.6.3, GHC==7.8.3, GHC==7.10.3, GHC==8.0.2
|
||||
|
||||
flag custom-binary
|
||||
Description: Use a customised version of the binary package
|
||||
Default: True
|
||||
Manual: True
|
||||
|
||||
Library
|
||||
default-language: Haskell2010
|
||||
build-depends: base >= 4.6 && <5,
|
||||
@@ -24,14 +29,17 @@ Library
|
||||
mtl,
|
||||
exceptions
|
||||
|
||||
other-modules:
|
||||
-- not really part of GF but I have changed the original binary library
|
||||
-- and we have to keep the copy for now.
|
||||
Data.Binary
|
||||
Data.Binary.Put
|
||||
Data.Binary.Get
|
||||
Data.Binary.Builder
|
||||
Data.Binary.IEEE754
|
||||
if flag(custom-binary)
|
||||
other-modules:
|
||||
-- not really part of GF but I have changed the original binary library
|
||||
-- and we have to keep the copy for now.
|
||||
Data.Binary
|
||||
Data.Binary.Put
|
||||
Data.Binary.Get
|
||||
Data.Binary.Builder
|
||||
Data.Binary.IEEE754
|
||||
else
|
||||
build-depends: binary, data-binary-ieee754
|
||||
|
||||
--ghc-options: -fwarn-unused-imports
|
||||
--if impl(ghc>=7.8)
|
||||
|
||||
118
src/server/gf-server.cabal
Normal file
@@ -0,0 +1,118 @@
|
||||
name: gf-server
|
||||
version: 1.0
|
||||
cabal-version: >= 1.8
|
||||
build-type: Custom
|
||||
license: GPL
|
||||
license-file: ../../LICENSE
|
||||
synopsis: FastCGI Server for Grammatical Framework
|
||||
|
||||
flag content
|
||||
Description:
|
||||
Build content service (requires fastcgi and hsql-mysql packages)
|
||||
(In Ubuntu: apt-get install libghc-fastcgi-dev libghc-hsql-mysql-dev)
|
||||
Default: False
|
||||
|
||||
flag http
|
||||
Description: Build pgf-http (deprecated, replaced by gf -server)
|
||||
Default: False
|
||||
|
||||
flag fastcgi
|
||||
Description: Build librar & pgf-service executable with fastcgi support
|
||||
Default: True
|
||||
|
||||
flag c-runtime
|
||||
Description: Include functionality from the C run-time library (which must be installed already)
|
||||
Default: False
|
||||
|
||||
flag network-uri
|
||||
description: Get Network.URI from the network-uri package
|
||||
default: True
|
||||
|
||||
Library
|
||||
exposed-modules: PGFService FastCGIUtils CGIUtils ServeStaticFile RunHTTP Cache
|
||||
other-modules: URLEncoding CGI Fold
|
||||
hs-source-dirs: . transfer
|
||||
|
||||
if flag(fastcgi)
|
||||
build-depends: fastcgi >= 3001.0.2.2
|
||||
-- Install it in Ubuntu with: apt-get install libghc-fastcgi-dev
|
||||
else
|
||||
Buildable: False
|
||||
|
||||
build-depends: base >=4.2 && <5,
|
||||
time, time-compat, old-locale,
|
||||
directory,
|
||||
filepath,
|
||||
containers,
|
||||
process,
|
||||
gf >= 3.6,
|
||||
cgi >= 3001.1.7.3,
|
||||
httpd-shed>=0.4.0.2,
|
||||
mtl,
|
||||
exceptions,
|
||||
json >= 0.3.3,
|
||||
utf8-string >= 0.3.1.1,
|
||||
bytestring,
|
||||
pretty,
|
||||
random
|
||||
|
||||
if flag(network-uri)
|
||||
build-depends: network-uri>=2.6, network>=2.6
|
||||
else
|
||||
build-depends: network>=2.3 && <2.6
|
||||
|
||||
ghc-options: -fwarn-unused-imports
|
||||
if os(windows)
|
||||
ghc-options: -optl-mwindows
|
||||
else
|
||||
build-depends: unix
|
||||
|
||||
if flag(c-runtime)
|
||||
cpp-options: -DC_RUNTIME
|
||||
|
||||
executable pgf-http
|
||||
main-is: pgf-http.hs
|
||||
Hs-source-dirs: exec
|
||||
ghc-options: -threaded
|
||||
if impl(ghc>=7.0)
|
||||
ghc-options: -rtsopts
|
||||
|
||||
if flag(http)
|
||||
buildable: True
|
||||
build-depends: base >=4.2 && <5, gf-server, filepath, directory, cgi
|
||||
else
|
||||
buildable: False
|
||||
|
||||
executable pgf-service
|
||||
main-is: pgf-fcgi.hs
|
||||
Hs-source-dirs: exec
|
||||
ghc-options: -threaded -fwarn-unused-imports
|
||||
if impl(ghc>=7.0)
|
||||
ghc-options: -rtsopts
|
||||
|
||||
if flag(fastcgi)
|
||||
build-depends: fastcgi >= 3001.0.2.2
|
||||
-- Install it in Ubuntu with: apt-get install libghc-fastcgi-dev
|
||||
else
|
||||
Buildable: False
|
||||
|
||||
build-depends: base >=4.2 && <5, gf-server
|
||||
|
||||
if os(windows)
|
||||
ghc-options: -optl-mwindows
|
||||
else
|
||||
build-depends: unix
|
||||
|
||||
executable content-service
|
||||
if flag(content)
|
||||
build-depends: base >=4.2 && <5, old-locale,
|
||||
fastcgi >= 3001.0.2.2,
|
||||
-- In Ubuntu: apt-get install libghc-fastcgi-dev
|
||||
hsql-mysql, hsql
|
||||
-- In Ubuntu: apt-get install libghc-hsql-mysql-dev
|
||||
buildable: True
|
||||
else
|
||||
buildable: False
|
||||
|
||||
main-is: ContentService.hs
|
||||
Hs-source-dirs: exec
|
||||
@@ -1,4 +1,4 @@
|
||||
# Run with (with -D for no-daemon)
|
||||
# Run with (with -D for no-daemon)
|
||||
# /usr/sbin/lighttpd -f lighttpd.conf -D
|
||||
#
|
||||
|
||||
@@ -10,9 +10,8 @@ server.modules = (
|
||||
"mod_cgi"
|
||||
)
|
||||
|
||||
var.basedir = var.CWD
|
||||
var.basedir = var.CWD
|
||||
|
||||
# John: no longer valid after removing `src/ui` 2018-11-15
|
||||
server.document-root = basedir + "/../ui/gwt/www"
|
||||
|
||||
server.errorlog = basedir + "/error.log"
|
||||
@@ -97,3 +96,4 @@ static-file.exclude-extensions = ( ".php", ".pl", ".fcgi" )
|
||||
|
||||
## bind to port (default: 80)
|
||||
server.port = 41296
|
||||
|
||||
|
||||
102
src/tools/Htmls.hs
Normal file
@@ -0,0 +1,102 @@
|
||||
----------------------------------------------------------------------
|
||||
-- |
|
||||
-- Maintainer : AR
|
||||
-- Stability : (stable)
|
||||
-- Portability : (portable)
|
||||
--
|
||||
-- > CVS $Date: 2005/05/16 17:07:18 $
|
||||
-- > CVS $Author: aarne $
|
||||
-- > CVS $Revision: 1.11 $
|
||||
--
|
||||
-- chop an HTML file into separate files, each linked to the next and previous.
|
||||
-- the names of the files are n-file, with n = 01,02,...
|
||||
-- the chopping is performed at each separator, here defined as @\<!-- NEW --\>@
|
||||
--
|
||||
-- AR 7\/1\/2002 for the Vinnova meeting in Linköping.
|
||||
-- Added table of contents generation in file 00, 16/4/2005
|
||||
-----------------------------------------------------------------------------
|
||||
|
||||
module Main (main) where
|
||||
|
||||
import System.Environment(getArgs)
|
||||
import Data.Char
|
||||
|
||||
main :: IO ()
|
||||
main = do
|
||||
file:_ <- getArgs
|
||||
htmls file
|
||||
|
||||
htmls :: FilePath -> IO ()
|
||||
htmls file = do
|
||||
s <- readFile file
|
||||
let ss = allPages file s
|
||||
lg = length ss
|
||||
putStrLn $ show lg ++ " slides"
|
||||
mapM_ (uncurry writeFile . mkFile file lg) ss
|
||||
|
||||
allPages :: FilePath -> String -> [(Int,String)]
|
||||
allPages file s = addIndex $ zip [1..] $ map unlines $ chop lss where
|
||||
chop ls = case span isNoSep ls of
|
||||
(s,_:ss) -> s : chop ss
|
||||
_ -> [ls]
|
||||
isNoSep = (/= separator)
|
||||
addIndex = ((0,mkIndex file lss) :)
|
||||
lss = lines s
|
||||
|
||||
mkFile :: FilePath -> Int -> (Int,String) -> (FilePath,String)
|
||||
mkFile base mx (number,content) =
|
||||
(fileName base number,
|
||||
unlines [
|
||||
begHTML,
|
||||
"<font size=1>",
|
||||
pageNum mx number,
|
||||
link base mx number,
|
||||
"</font>",
|
||||
"<p>",
|
||||
content,
|
||||
endHTML
|
||||
]
|
||||
)
|
||||
|
||||
begHTML, endHTML, separator :: String
|
||||
begHTML = "<html><body bgcolor=\"#FFFFFF\" text=\"#000000\">"
|
||||
endHTML = "</body></html>"
|
||||
separator = "<!-- NEW -->"
|
||||
|
||||
link :: FilePath -> Int -> Int -> String
|
||||
link file mx n =
|
||||
(if n >= mx-1 then "" else (" <a href=\"" ++ file' ++ "\">Next</a>")) ++
|
||||
(if n == 1 then "" else (" <a href=\"" ++ file_ ++ "\">Previous</a>")) ++
|
||||
(" <a href=\"" ++ file0 ++ "\">Contents</a>") ++
|
||||
(" <a href=\"" ++ file ++ "\">Fulltext</a>") ++
|
||||
(" <a href=\"" ++ file1 ++ "\">First</a>") ++
|
||||
(" <a href=\"" ++ file2 ++ "\">Last</a>")
|
||||
where
|
||||
file_ = fileName file (n - 1)
|
||||
file' = fileName file (n + 1)
|
||||
file0 = fileName file 0
|
||||
file1 = fileName file 1
|
||||
file2 = fileName file (mx - 1)
|
||||
|
||||
fileName :: FilePath -> Int -> FilePath
|
||||
fileName file n = (if n < 10 then ('0':) else id) $ show n ++ "-" ++ file
|
||||
|
||||
pageNum mx num = "<p align=right>" ++ show num ++"/" ++ show (mx-1) ++ "</p>"
|
||||
|
||||
mkIndex file = unlines . mkInd 1 where
|
||||
mkInd n ss = case ss of
|
||||
s : rest | (s==separator) -> mkInd (n+1) rest
|
||||
s : rest -> case getHeading s of
|
||||
Just (i,t) -> mkLine n i t : mkInd n rest
|
||||
_ -> mkInd n rest
|
||||
_ -> []
|
||||
getHeading s = case dropWhile isSpace s of
|
||||
'<':h:i:_:t | isDigit i -> return (i,take (length t - 5) t) -- drop final </hi>
|
||||
_ -> Nothing
|
||||
mkLine _ '1' t = t ++ " : Table of Contents<p>" -- heading of whole document
|
||||
mkLine n i t = stars i ++ link n t ++ "<br>"
|
||||
stars i = case i of
|
||||
'3' -> "<li> "
|
||||
'4' -> "<li>* "
|
||||
_ -> ""
|
||||
link n t = "<a href=\"" ++ fileName file n ++ "\">" ++ t ++ "</a>"
|
||||
@@ -6,3 +6,7 @@ cabal-version: >= 1.8
|
||||
Executable gfdoc
|
||||
main-is: GFDoc.hs
|
||||
build-depends: base, directory>=1.2, time>=1.5, process
|
||||
|
||||
Executable htmls
|
||||
main-is: Htmls.hs
|
||||
build-depends: base
|
||||
9
src/ui/android/.classpath
Normal file
@@ -0,0 +1,9 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<classpath>
|
||||
<classpathentry kind="src" path="src"/>
|
||||
<classpathentry kind="src" path="gen"/>
|
||||
<classpathentry kind="con" path="com.android.ide.eclipse.adt.ANDROID_FRAMEWORK"/>
|
||||
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.LIBRARIES"/>
|
||||
<classpathentry exported="true" kind="con" path="com.android.ide.eclipse.adt.DEPENDENCIES"/>
|
||||
<classpathentry kind="output" path="bin/classes"/>
|
||||
</classpath>
|
||||
33
src/ui/android/.project
Normal file
@@ -0,0 +1,33 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<projectDescription>
|
||||
<name>GFTranslator</name>
|
||||
<comment></comment>
|
||||
<projects>
|
||||
</projects>
|
||||
<buildSpec>
|
||||
<buildCommand>
|
||||
<name>com.android.ide.eclipse.adt.ResourceManagerBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>com.android.ide.eclipse.adt.PreCompilerBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>org.eclipse.jdt.core.javabuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
<buildCommand>
|
||||
<name>com.android.ide.eclipse.adt.ApkBuilder</name>
|
||||
<arguments>
|
||||
</arguments>
|
||||
</buildCommand>
|
||||
</buildSpec>
|
||||
<natures>
|
||||
<nature>com.android.ide.eclipse.adt.AndroidNature</nature>
|
||||
<nature>org.eclipse.jdt.core.javanature</nature>
|
||||
</natures>
|
||||
</projectDescription>
|
||||
63
src/ui/android/AndroidManifest.xml
Normal file
@@ -0,0 +1,63 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||
package="org.grammaticalframework.ui.android"
|
||||
android:versionCode="15"
|
||||
android:versionName="1.2.2"
|
||||
android:installLocation="auto" >
|
||||
|
||||
<uses-sdk
|
||||
android:minSdkVersion="11"
|
||||
android:targetSdkVersion="18" />
|
||||
|
||||
<uses-permission android:name="android.permission.RECORD_AUDIO" />
|
||||
|
||||
<application
|
||||
android:allowBackup="true"
|
||||
android:icon="@drawable/ic_app"
|
||||
android:label="@string/app_name"
|
||||
android:theme="@style/AppTheme" android:name="GFTranslator">
|
||||
<activity
|
||||
android:name=".MainActivity"
|
||||
android:label="@string/app_name" >
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.MAIN" />
|
||||
|
||||
<category android:name="android.intent.category.LAUNCHER" />
|
||||
</intent-filter>
|
||||
</activity>
|
||||
<activity android:name="AlternativesActivity">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.VIEW"/>
|
||||
<category android:name="android.intent.category.DEFAULT"/>
|
||||
<category android:name="android.intent.category.BROWSABLE"/>
|
||||
<data android:scheme="gf-translator"/>
|
||||
</intent-filter>
|
||||
</activity>
|
||||
<activity android:name="HelpActivity"></activity>
|
||||
<activity android:name="SemanticGraphActivity"
|
||||
android:launchMode="singleTop">
|
||||
<intent-filter>
|
||||
<action android:name="android.intent.action.SEARCH" />
|
||||
</intent-filter>
|
||||
<meta-data android:name="android.app.searchable"
|
||||
android:resource="@xml/searchable"/>
|
||||
<meta-data android:name="android.app.default_searchable"
|
||||
android:value=".SearchableActivity"/>
|
||||
</activity>
|
||||
|
||||
<activity android:name="se.chalmers.phrasebook.gui.activities.NavigationActivity"></activity>
|
||||
|
||||
<service android:name="TranslatorInputMethodService"
|
||||
android:permission="android.permission.BIND_INPUT_METHOD">
|
||||
<intent-filter>
|
||||
<action android:name="android.view.InputMethod" />
|
||||
</intent-filter>
|
||||
<meta-data android:name="android.view.im" android:resource="@xml/method" />
|
||||
</service>
|
||||
<provider android:name=".LexiconSuggestionProvider"
|
||||
android:authorities="org.grammaticalframework.ui.android.LexiconSuggestionProvider">
|
||||
<path-permission android:pathPrefix="/search_suggest_query"
|
||||
android:readPermission="android.permission.GLOBAL_SEARCH"/>
|
||||
</provider>
|
||||
</application>
|
||||
</manifest>
|
||||
27
src/ui/android/LICENSE
Normal file
@@ -0,0 +1,27 @@
|
||||
BSD LICENSE
|
||||
|
||||
Copyright (c) 1998, Grammatical Framework
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
* Neither the name of the <organization> nor the
|
||||
names of its contributors may be used to endorse or promote products
|
||||
derived from this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
|
||||
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
||||
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
68
src/ui/android/README
Normal file
@@ -0,0 +1,68 @@
|
||||
= Overview =
|
||||
|
||||
This directory contains a sample Android app tht uses
|
||||
the Android speech recognition and TTS APIs along with
|
||||
JNI bindings to the C PGF runtime to implement a simple
|
||||
speech translation app.
|
||||
|
||||
|
||||
= Requirements =
|
||||
|
||||
1. Android SDK: http://developer.android.com/sdk/
|
||||
installed in $ANDROID_SDK_LOCATION
|
||||
|
||||
2. Android NDK: http://developer.android.com/tools/sdk/ndk/
|
||||
installed in $ANDROID_NDK_LOCATION
|
||||
|
||||
= Building =
|
||||
|
||||
Set up Android project:
|
||||
|
||||
# Creates local.properties, not to be checked in
|
||||
$ $ANDROID_SDK_LOCATION/tools/android update project -p .
|
||||
|
||||
Build libs/libjpgf.jar:
|
||||
|
||||
$ (cd ../../runtime/java && javac org/grammaticalframework/*/*.java && jar -cf ../../ui/android/libs/jpgf.jar org/grammaticalframework/*/*.class)
|
||||
|
||||
Build JNI code:
|
||||
|
||||
$ cd jni
|
||||
$ $ANDROID_NDK_LOCATION/ndk-build
|
||||
|
||||
Build the semantic database code:
|
||||
|
||||
$ runghc glosses.hs
|
||||
|
||||
Build APK:
|
||||
|
||||
$ ant debug
|
||||
|
||||
|
||||
Install on your device:
|
||||
|
||||
$ ant debug install
|
||||
|
||||
or:
|
||||
|
||||
$ adb install -r bin/MainActivity-debug.apk
|
||||
|
||||
|
||||
= Changing the grammar =
|
||||
|
||||
1. Replace assets/ResourceDemo.pgf
|
||||
|
||||
2. Edit Translator.java to point to the new file and include its metadata
|
||||
|
||||
|
||||
= Developing in Eclipse =
|
||||
|
||||
1. Install Android ADT
|
||||
|
||||
2. Eclipse > File > Import > Existing Projects into Workspace > Next
|
||||
|
||||
3. Select root directory...
|
||||
|
||||
4. Select GF/src/ui/android
|
||||
|
||||
5. Finish
|
||||
2
src/ui/android/ant.properties
Normal file
@@ -0,0 +1,2 @@
|
||||
key.store=/home/krasimir/dg/src/keys/dg_keystore
|
||||
key.alias=dg
|
||||
157
src/ui/android/assets/help_content.html
Normal file
@@ -0,0 +1,157 @@
|
||||
<html>
|
||||
<body>
|
||||
</p>
|
||||
<b>GF Offline Translator</b>:
|
||||
text and speech translation for 16 languages with
|
||||
quality control. Version 19 April 2017 (beta).
|
||||
</p>
|
||||
<p>
|
||||
<b>Speech input</b>: Tap microphone icon and talk while it is red.
|
||||
<br>
|
||||
<b>Text input</b>: Select "keyboard" from menu, tap keyboard icon.
|
||||
<br>
|
||||
<b>Correction</b>: Tap input text and edit.
|
||||
<br>
|
||||
<b>Alternatives</b>: Tap output text.
|
||||
<br>
|
||||
<b>Grammar info</b>: Tap any of the alternatives.
|
||||
<br>
|
||||
<b>Confidence</b>: colour of output text
|
||||
<ul>
|
||||
<li><span style="background-color:palegreen">Green</span>: semantic, probably correct (but has alternatives)</li>
|
||||
<li><span style="background-color:yellow">Yellow</span>: syntactic, often incorrect (has alternatives)</li>
|
||||
<li><span style="background-color:pink">Light red</span>: chunk-based, probably incorrect (has alternatives)</li>
|
||||
<li><span style="background-color:red">Dark red</span>: word-based, often very bad</li>
|
||||
</ul>
|
||||
<br>
|
||||
<b>Topics</b> (new, select from menu): Words grouped by semantic categories, with WordNet glosses.
|
||||
<br>
|
||||
<b>Phrasebook</b> (new, select from menu): Conceptual authoring with the green translations.
|
||||
</p>
|
||||
|
||||
<hr>
|
||||
|
||||
<h2>More details</h2>
|
||||
<p>
|
||||
GF Offline Translator is based on grammar and semantics. It is compact in size
|
||||
and gives control on quality. Its technology is inspired by compilers, which are
|
||||
programs that translate computer languages.
|
||||
Most other translators for human language are based on
|
||||
statistics and have less control of quality and are much bigger, so that
|
||||
they require either an internet connection or a huge storage on your phone.
|
||||
</p>
|
||||
The app indicates translation confidence with colours:
|
||||
<ul>
|
||||
<li><b>Green</b>:
|
||||
semantic translation, should be correct.
|
||||
But not necessarily the only correct one.
|
||||
You can tap the output to see alternatives.
|
||||
</li>
|
||||
<li><b>Yellow</b>:
|
||||
syntactic translation, should be grammatically correct.
|
||||
But can be very strange in its interpretation and choice of words.
|
||||
</li>
|
||||
<li><b>Light red</b>:
|
||||
chunk translation, probably incorrect.
|
||||
Builds the translation from small pieces.
|
||||
</li>
|
||||
<li><b>Dark red</b>:
|
||||
word-by-word translation, almost certainly incorrect.
|
||||
Builds the translation word by word.
|
||||
</li>
|
||||
</ul>
|
||||
The green translations come from a tourist phrasebook, which allows
|
||||
you to translate things like "hello" and "how far is the airport from
|
||||
the hotel".
|
||||
</p>
|
||||
|
||||
<p>
|
||||
You can translate both speech and text, as selected in the menu in the
|
||||
upper right corner. Both kinds of input can be edited with the
|
||||
keyboard by first tapping at the input field. This is often needed
|
||||
because of <b>speech recognition errors</b>. Changing words
|
||||
from <b>upper to lower case</b> may also be needed.
|
||||
At the moment, <b>Japanese</b> and <b>Thai</b> input must be separated
|
||||
to words, whereas Chinese works without spaces.
|
||||
</p>
|
||||
<p>
|
||||
Translation works between any of the 16 supported languages, which means 240
|
||||
language pairs in the current version. But different languages are on different levels of development.
|
||||
The following table gives a rough idea of what to expect:
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<center>
|
||||
<table rules=all border=yes>
|
||||
<tr> <th></th> <th>coverage</th> <th>quality</th> <th>speed</th> <th>speech</th> </tr>
|
||||
<tr> <th>Bulgarian</th> <td bgcolor=yellow></td> <td bgcolor=yellow></td><td bgcolor=palegreen></td> <td bgcolor=yellow>in only</td></tr>
|
||||
<tr> <th>Catalan</th> <td bgcolor=pink></td> <td bgcolor=yellow></td><td bgcolor=pink></td> <td bgcolor=yellow></td></tr>
|
||||
<tr> <th>Chinese</th> <td bgcolor=pink></td> <td bgcolor=pink></td> <td bgcolor=palegreen></td> <td bgcolor=yellow></td> </tr>
|
||||
<tr> <th>Dutch</th> <td bgcolor=yellow></td> <td bgcolor=yellow></td> <td bgcolor=yellow></td> <td bgcolor=palegreen></td> </tr>
|
||||
<tr> <th>English</th> <td bgcolor=palegreen></td> <td bgcolor=palegreen></td> <td bgcolor=palegreen></td> <td bgcolor=palegreen></td> </tr>
|
||||
<tr> <th>Estonian</th> <td bgcolor=yellow></td> <td bgcolor=yellow></td><td bgcolor=pink></td> <td bgcolor=red></td></tr>
|
||||
<tr> <th>Finnish</th> <td bgcolor=yellow></td> <td bgcolor=yellow></td><td bgcolor=pink></td> <td bgcolor=yellow></td></tr>
|
||||
<tr> <th>French</th> <td bgcolor=pink></td> <td bgcolor=yellow></td><td bgcolor=pink></td> <td bgcolor=palegreen></td></tr>
|
||||
<tr> <th>German</th> <td bgcolor=pink></td> <td bgcolor=yellow></td><td bgcolor=pink></td> <td bgcolor=palegreen></td></tr>
|
||||
<tr> <th>Hindi</th> <td bgcolor=pink></td> <td bgcolor=red></td> <td bgcolor=yellow></td> <td bgcolor=yellow></td> </tr>
|
||||
<tr> <th>Italian</th> <td bgcolor=pink></td> <td bgcolor=pink></td><td bgcolor=pink></td> <td bgcolor=palegreen></td></tr>
|
||||
<tr> <th>Japanese*</th><td bgcolor=pink></td> <td bgcolor=pink></td><td bgcolor=yellow></td> <td bgcolor=palegreen></td></tr>
|
||||
<tr> <th>Russian</th> <td bgcolor=pink></td> <td bgcolor=red></td> <td bgcolor=yellow></td> <td bgcolor=yellow></td> </tr>
|
||||
<tr> <th>Spanish</th> <td bgcolor=pink></td> <td bgcolor=yellow></td><td bgcolor=pink></td> <td bgcolor=palegreen></td></tr>
|
||||
<tr> <th>Swedish</th> <td bgcolor=yellow></td> <td bgcolor=yellow></td> <td bgcolor=palegreen></td> <td bgcolor=yellow></td></tr>
|
||||
<tr> <th>Thai*</th> <td bgcolor=pink></td> <td bgcolor=pink></td><td bgcolor=yellow></td> <td bgcolor=palegreen></td></tr>
|
||||
</table>
|
||||
</center>
|
||||
* For translation from Japanese and Thai you need to separate each word with a space
|
||||
</p>
|
||||
<p>
|
||||
The speech input and output use Google's voice services. Their status
|
||||
can hence change without notice. You can make it more stable by
|
||||
installing third-party speech tools, such as SVOX, which provides
|
||||
output for most of the listed languages.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
When you tap on a translation you get a screen with <b>alternative translations</b>.
|
||||
Tapping on each of the alternatives
|
||||
gives you <b>grammatical information</b>:
|
||||
an inflection table, if it is a single word,
|
||||
and a syntax tree otherwise.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
The app also provides an <b>input method</b> which you can use as
|
||||
an alternative keyboard which allows you to do translation from
|
||||
other applications, for instance while you are entering SMS or e-mail.
|
||||
To activate it go to Settings > Language & input.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
The translation works <b>completely off-line</b>, without
|
||||
internet connection, when doing text-based translation.
|
||||
Even speech works off-line in some languages,
|
||||
but being on-line may give you better
|
||||
speech input and output and more languages.
|
||||
</p>
|
||||
<p>
|
||||
You can also install third-party off-line speech engines, such as
|
||||
<a href="https://play.google.com/store/apps/developer?id=SVOX+Mobile+Voices&hl=en">SVOX</a>.
|
||||
Consult the voice/language settings on your phone to find the optimal
|
||||
speech engines, and restart the app after changing the settings.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
The GF Offline Translator is powered by
|
||||
<a href="http://www.grammaticalframework.org/">GF</a>, Grammatical Framework.
|
||||
It is open-source software,
|
||||
built by support from the GF community and from <a href="http://www.digitalgrammars.com/">Digital Grammars</a>.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<i>Digital Grammars is a company that can tailor this app to you needs and provide good
|
||||
translation for the kind of vocabulary you need. Just tell us what you want to see
|
||||
in the green area!</i>
|
||||
</p>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
1035
src/ui/android/assets/phrases.xml
Normal file
92
src/ui/android/build.xml
Normal file
@@ -0,0 +1,92 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project name="MainActivity" default="help">
|
||||
|
||||
<!-- The local.properties file is created and updated by the 'android' tool.
|
||||
It contains the path to the SDK. It should *NOT* be checked into
|
||||
Version Control Systems. -->
|
||||
<property file="local.properties" />
|
||||
|
||||
<!-- The ant.properties file can be created by you. It is only edited by the
|
||||
'android' tool to add properties to it.
|
||||
This is the place to change some Ant specific build properties.
|
||||
Here are some properties you may want to change/update:
|
||||
|
||||
source.dir
|
||||
The name of the source directory. Default is 'src'.
|
||||
out.dir
|
||||
The name of the output directory. Default is 'bin'.
|
||||
|
||||
For other overridable properties, look at the beginning of the rules
|
||||
files in the SDK, at tools/ant/build.xml
|
||||
|
||||
Properties related to the SDK location or the project target should
|
||||
be updated using the 'android' tool with the 'update' action.
|
||||
|
||||
This file is an integral part of the build system for your
|
||||
application and should be checked into Version Control Systems.
|
||||
|
||||
-->
|
||||
<property file="ant.properties" />
|
||||
|
||||
<!-- if sdk.dir was not set from one of the property file, then
|
||||
get it from the ANDROID_HOME env var.
|
||||
This must be done before we load project.properties since
|
||||
the proguard config can use sdk.dir -->
|
||||
<property environment="env" />
|
||||
<condition property="sdk.dir" value="${env.ANDROID_HOME}">
|
||||
<isset property="env.ANDROID_HOME" />
|
||||
</condition>
|
||||
|
||||
<!-- The project.properties file is created and updated by the 'android'
|
||||
tool, as well as ADT.
|
||||
|
||||
This contains project specific properties such as project target, and library
|
||||
dependencies. Lower level build properties are stored in ant.properties
|
||||
(or in .classpath for Eclipse projects).
|
||||
|
||||
This file is an integral part of the build system for your
|
||||
application and should be checked into Version Control Systems. -->
|
||||
<loadproperties srcFile="project.properties" />
|
||||
|
||||
<!-- quick check on sdk.dir -->
|
||||
<fail
|
||||
message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
|
||||
unless="sdk.dir"
|
||||
/>
|
||||
|
||||
<!--
|
||||
Import per project custom build rules if present at the root of the project.
|
||||
This is the place to put custom intermediary targets such as:
|
||||
-pre-build
|
||||
-pre-compile
|
||||
-post-compile (This is typically used for code obfuscation.
|
||||
Compiled code location: ${out.classes.absolute.dir}
|
||||
If this is not done in place, override ${out.dex.input.absolute.dir})
|
||||
-post-package
|
||||
-post-build
|
||||
-pre-clean
|
||||
-->
|
||||
<import file="custom_rules.xml" optional="true" />
|
||||
|
||||
<!-- Import the actual build file.
|
||||
|
||||
To customize existing targets, there are two options:
|
||||
- Customize only one target:
|
||||
- copy/paste the target into this file, *before* the
|
||||
<import> task.
|
||||
- customize it to your needs.
|
||||
- Customize the whole content of build.xml
|
||||
- copy/paste the content of the rules files (minus the top node)
|
||||
into this file, replacing the <import> task.
|
||||
- customize to your needs.
|
||||
|
||||
***********************
|
||||
****** IMPORTANT ******
|
||||
***********************
|
||||
In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
|
||||
in order to avoid having your file be overridden by tools such as "android update project"
|
||||
-->
|
||||
<!-- version-tag: 1 -->
|
||||
<import file="/Users/aarne/Library/Android/apache-ant-1.9.4/fetch.xml" />
|
||||
|
||||
</project>
|
||||
60
src/ui/android/gf-translator-alignment.dot
Normal file
@@ -0,0 +1,60 @@
|
||||
digraph {
|
||||
rankdir=LR ;
|
||||
node [shape = record] ;
|
||||
bgcolor = "#FFFFFF00" ;
|
||||
|
||||
struct0[label = "<n0>твоят | <n1>телефон | <n2>може | <n3>да | <n4>превежда"] ;
|
||||
struct0:n0:e -> struct1:n0:w ;
|
||||
struct0:n1:e -> struct1:n1:w ;
|
||||
struct0:n2:e -> struct1:n2:w ;
|
||||
struct0:n4:e -> struct1:n3:w ;
|
||||
struct1[label = "<n0>el teu | <n1>telèfon | <n2>sap | <n3>traduir"] ;
|
||||
struct1:n0:e -> struct2:n0:w ;
|
||||
struct1:n1:e -> struct2:n2:w ;
|
||||
struct1:n2:e -> struct2:n3:w ;
|
||||
struct1:n3:e -> struct2:n4:w ;
|
||||
struct2[label = "<n0>你 | <n1>的 | <n2>电 话 | <n3>会 | <n4>翻 译"] ;
|
||||
struct2:n0:e -> struct3:n0:w ;
|
||||
struct2:n2:e -> struct3:n1:w ;
|
||||
struct2:n3:e -> struct3:n2:w ;
|
||||
struct2:n4:e -> struct3:n3:w ;
|
||||
struct3[label = "<n0>je | <n1>telefoon | <n2>kan | <n3>vertalen"] ;
|
||||
struct3:n0:e -> struct4:n0:w ;
|
||||
struct3:n1:e -> struct4:n1:w ;
|
||||
struct3:n2:e -> struct4:n2:w ;
|
||||
struct3:n3:e -> struct4:n3:w ;
|
||||
struct4[label = "<n0>your | <n1>phone | <n2>can | <n3>translate"] ;
|
||||
struct4:n1:e -> struct5:n1:w ;
|
||||
struct4:n2:e -> struct5:n2:w ;
|
||||
struct4:n3:e -> struct5:n3:w ;
|
||||
struct5[label = "<n1>puhelimesi | <n2>osaa | <n3>kääntää"] ;
|
||||
struct5:n1:e -> struct6:n1:w ;
|
||||
struct5:n2:e -> struct6:n2:w ;
|
||||
struct5:n3:e -> struct6:n3:w ;
|
||||
struct6[label = "<n0>ton | <n1>téléphone | <n2>sait | <n3>traduire"] ;
|
||||
struct6:n0:e -> struct7:n0:w ;
|
||||
struct6:n1:e -> struct7:n1:w ;
|
||||
struct6:n2:e -> struct7:n2:w ;
|
||||
struct6:n3:e -> struct7:n3:w ;
|
||||
struct7[label = "<n0>dein | <n1>Telefon | <n2>kann | <n3>übersetzen"] ;
|
||||
struct7:n0:e -> struct8:n0:w ;
|
||||
struct7:n1:e -> struct8:n1:w ;
|
||||
struct7:n2:e -> struct8:n3:w ;
|
||||
struct7:n3:e -> struct8:n2:w ;
|
||||
struct8[label = "<n0>तुम्हारा | <n1>फोन | <n2>अनुवाद कर | <n3>सकता | <n4>है"] ;
|
||||
struct8:n0:e -> struct9:n1:w ;
|
||||
struct8:n1:e -> struct9:n2:w ;
|
||||
struct8:n2:e -> struct9:n4:w ;
|
||||
struct8:n3:e -> struct9:n3:w ;
|
||||
struct9[label = "<n0>il | <n1>tuo | <n2>telefono | <n3>sa | <n4>tradurre"] ;
|
||||
struct9:n1:e -> struct10:n0:w ;
|
||||
struct9:n2:e -> struct10:n1:w ;
|
||||
struct9:n3:e -> struct10:n2:w ;
|
||||
struct9:n4:e -> struct10:n3:w ;
|
||||
struct10[label = "<n0>tu | <n1>teléfono | <n2>sabe | <n3>traducir"] ;
|
||||
struct10:n0:e -> struct11:n0:w ;
|
||||
struct10:n1:e -> struct11:n1:w ;
|
||||
struct10:n2:e -> struct11:n2:w ;
|
||||
struct10:n3:e -> struct11:n3:w ;
|
||||
struct11[label = "<n0>din | <n1>telefon | <n2>kan | <n3>översätta"] ;
|
||||
}
|
||||
28
src/ui/android/gf-translator.svg
Normal file
@@ -0,0 +1,28 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<?xml-stylesheet type="text/css" href="http://fonts.googleapis.com/css?family=Inconsolata"?>
|
||||
|
||||
<svg width="1024" height="500" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
|
||||
<filter id="A"><feGaussianBlur stdDeviation="2"/></filter>
|
||||
<defs>
|
||||
<linearGradient id="grad1" x1="0%" y1="0%" x2="0%" y2="100%">
|
||||
<stop offset="0%" style="stop-color:#a8d8ff;stop-opacity:1" />
|
||||
<stop offset="100%" style="stop-color:#f6f6f6;stop-opacity:1" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="1024" height="500" style="fill:url(#grad1)"/>
|
||||
<path filter="url(#A)"
|
||||
d="M120,270 v-51 h59 m-59,0 v-53 h114 a110.5,105 0 1,1 -24,-66"
|
||||
fill="none" stroke="black" stroke-width="4" opacity="0.25"
|
||||
stroke-linejoin="round" stroke-linecap="round"/>
|
||||
<path d="M120,270 v-51 h59 m-59,0 v-53 h114 a110.5,105 0 1,1 -24,-66"
|
||||
fill="none" stroke="red" stroke-width="4"
|
||||
stroke-linejoin="round" stroke-linecap="round"/>
|
||||
<text x="310" y="120" font-size="70" font-family="Inconsolata">Offline Translation</text>
|
||||
<text x="600" y="170" font-size="35" font-family="Inconsolata">with</text>
|
||||
<text x="270" y="240" font-size="70" font-family="Inconsolata" fill="black" opacity="0.25" filter="url(#A)">Grammatical Framework</text>
|
||||
<text x="270" y="240" font-size="70" font-family="Inconsolata" fill="#06c">Grammatical Framework</text>
|
||||
<image x="0" y="270" width="1024" height="200"
|
||||
xlink:href="gf-translator-alignment.png" />
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.6 KiB |
46
src/ui/android/glosses.hs
Normal file
@@ -0,0 +1,46 @@
|
||||
import SG
|
||||
import PGF2
|
||||
import Data.Char
|
||||
import Data.List
|
||||
|
||||
main = do
|
||||
db <- openSG "assets/semantics.db"
|
||||
inTransaction db $ do
|
||||
ls <- fmap lines $ readFile "../../../lib/src/translator/Dictionary.gf"
|
||||
let glosses = [x | Just (fn,gloss) <- map gloss ls, x <- glossTriples fn gloss]
|
||||
topics <- fmap (map toTriple . lines) $ readFile "topics.txt"
|
||||
sequence_ [insertTriple db s p o | (s,p,o) <- glosses++topics]
|
||||
closeSG db
|
||||
|
||||
toTriple l =
|
||||
case readTriple l of
|
||||
Just t -> t
|
||||
Nothing -> error ("topics.txt: "++l)
|
||||
|
||||
gloss l =
|
||||
case words l of
|
||||
("fun":fn:_) -> case dropWhile (/='\t') l of
|
||||
'\t':l -> Just (fn,l)
|
||||
_ -> Nothing
|
||||
_ -> Nothing
|
||||
|
||||
glossTriples fn s =
|
||||
(if null gs then [] else [(fn_e,gloss,mkStr (merge gs))])++
|
||||
(if null es then [] else [(fn_e,example,mkStr (merge (map (init . tail) es)))])
|
||||
where
|
||||
fn_e = mkApp fn []
|
||||
gloss = mkApp "gloss" []
|
||||
example = mkApp "example" []
|
||||
(es,gs) = partition isExample (splitGloss s)
|
||||
|
||||
splitGloss s =
|
||||
let (xs,s') = break (==';') s
|
||||
in trim xs : case s' of
|
||||
';':s -> splitGloss s
|
||||
_ -> []
|
||||
where
|
||||
trim = reverse . dropWhile isSpace . reverse . dropWhile isSpace
|
||||
|
||||
merge = intercalate "; "
|
||||
|
||||
isExample s = not (null s) && head s == '"' && last s == '"'
|
||||
10
src/ui/android/hlc.svg
Normal file
@@ -0,0 +1,10 @@
|
||||
<?xml version="1.0" standalone="no"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN"
|
||||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<?xml-stylesheet type="text/css" href="http://fonts.googleapis.com/css?family=Inconsolata"?>
|
||||
|
||||
<svg width="250" height="250" version="1.1" xmlns="http://www.w3.org/2000/svg">
|
||||
<desc>Digital Grammar Logo</desc>
|
||||
|
||||
<path d="M205,200 a110,110 0 1,1 0,-150 M125,15 v110 h80 M165,95 v60 M205,95 v60" fill="none" stroke="red" stroke-width="14" stroke-linejoin="round" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 529 B |
28
src/ui/android/jni/Android.mk
Normal file
@@ -0,0 +1,28 @@
|
||||
LOCAL_PATH := $(call my-dir)
|
||||
|
||||
include $(CLEAR_VARS)
|
||||
|
||||
jni_c_files := jpgf.c jsg.c jni_utils.c
|
||||
sg_c_files := sg.c sqlite3Btree.c
|
||||
pgf_c_files := data.c expr.c graphviz.c linearizer.c literals.c parser.c parseval.c pgf.c printer.c reader.c \
|
||||
reasoner.c evaluator.c jit.c typechecker.c lookup.c aligner.c writer.c
|
||||
gu_c_files := assert.c choice.c exn.c fun.c in.c map.c out.c utf8.c \
|
||||
bits.c defs.c enum.c file.c hash.c mem.c prime.c seq.c string.c ucs.c variant.c
|
||||
|
||||
LOCAL_MODULE := jpgf
|
||||
LOCAL_SRC_FILES := $(addprefix ../../../runtime/java/, $(jni_c_files)) \
|
||||
$(addprefix ../../../runtime/c/sg/, $(sg_c_files)) \
|
||||
$(addprefix ../../../runtime/c/pgf/, $(pgf_c_files)) \
|
||||
$(addprefix ../../../runtime/c/gu/, $(gu_c_files))
|
||||
LOCAL_C_INCLUDES := ../../../runtime/c
|
||||
|
||||
include $(BUILD_SHARED_LIBRARY)
|
||||
|
||||
$(realpath ../obj/local/armeabi/objs/jpgf/__/__/__/runtime/c/pgf/jit.o): lightning
|
||||
$(realpath ../obj/local/armeabi/objs-debug/jpgf/__/__/__/runtime/c/pgf/jit.o): lightning
|
||||
|
||||
lightning:
|
||||
ln -s -f arm/asm.h ../../../runtime/c/pgf/lightning/asm.h
|
||||
ln -s -f arm/core.h ../../../runtime/c/pgf/lightning/core.h
|
||||
ln -s -f arm/fp.h ../../../runtime/c/pgf/lightning/fp.h
|
||||
ln -s -f arm/funcs.h ../../../runtime/c/pgf/lightning/funcs.h
|
||||
3
src/ui/android/jni/Application.mk
Normal file
@@ -0,0 +1,3 @@
|
||||
APP_PLATFORM := android-8
|
||||
APP_CFLAGS := -std=gnu99
|
||||
APP_OPTIM := release
|
||||
20
src/ui/android/proguard-project.txt
Normal file
@@ -0,0 +1,20 @@
|
||||
# To enable ProGuard in your project, edit project.properties
|
||||
# to define the proguard.config property as described in that file.
|
||||
#
|
||||
# Add project specific ProGuard rules here.
|
||||
# By default, the flags in this file are appended to flags specified
|
||||
# in ${sdk.dir}/tools/proguard/proguard-android.txt
|
||||
# You can edit the include path and order by changing the ProGuard
|
||||
# include property in project.properties.
|
||||
#
|
||||
# For more details, see
|
||||
# http://developer.android.com/guide/developing/tools/proguard.html
|
||||
|
||||
# Add any project specific keep options here:
|
||||
|
||||
# If your project uses WebView with JS, uncomment the following
|
||||
# and specify the fully qualified class name to the JavaScript interface
|
||||
# class:
|
||||
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
|
||||
# public *;
|
||||
#}
|
||||
14
src/ui/android/project.properties
Normal file
@@ -0,0 +1,14 @@
|
||||
# This file is automatically generated by Android Tools.
|
||||
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
|
||||
#
|
||||
# This file must be checked in Version Control Systems.
|
||||
#
|
||||
# To customize properties used by the Ant build system edit
|
||||
# "ant.properties", and override values to adapt the script to your
|
||||
# project structure.
|
||||
#
|
||||
# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
|
||||
#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
|
||||
|
||||
# Project target.
|
||||
target=android-21
|
||||
BIN
src/ui/android/res/drawable-hdpi/btn_close.png
Normal file
|
After Width: | Height: | Size: 2.2 KiB |
BIN
src/ui/android/res/drawable-hdpi/close_arrow.png
Normal file
|
After Width: | Height: | Size: 9.4 KiB |
BIN
src/ui/android/res/drawable-hdpi/ic_action_switch.png
Normal file
|
After Width: | Height: | Size: 436 B |
BIN
src/ui/android/res/drawable-hdpi/ic_app.png
Normal file
|
After Width: | Height: | Size: 2.4 KiB |
BIN
src/ui/android/res/drawable-hdpi/ic_drawer.png
Normal file
|
After Width: | Height: | Size: 2.8 KiB |
BIN
src/ui/android/res/drawable-hdpi/ic_keyboard.png
Normal file
|
After Width: | Height: | Size: 695 B |
BIN
src/ui/android/res/drawable-hdpi/ic_mic.png
Normal file
|
After Width: | Height: | Size: 665 B |
BIN
src/ui/android/res/drawable-hdpi/ic_search_black_24dp.png
Normal file
|
After Width: | Height: | Size: 390 B |
BIN
src/ui/android/res/drawable-hdpi/open_arrow.png
Normal file
|
After Width: | Height: | Size: 7.9 KiB |
BIN
src/ui/android/res/drawable-hdpi/sym_keyboard_delete.png
Normal file
|
After Width: | Height: | Size: 885 B |
BIN
src/ui/android/res/drawable-hdpi/sym_keyboard_return.png
Normal file
|
After Width: | Height: | Size: 536 B |
BIN
src/ui/android/res/drawable-hdpi/sym_keyboard_search.png
Normal file
|
After Width: | Height: | Size: 1.6 KiB |
BIN
src/ui/android/res/drawable-hdpi/sym_keyboard_shift.png
Normal file
|
After Width: | Height: | Size: 1.2 KiB |
BIN
src/ui/android/res/drawable-hdpi/sym_keyboard_space.png
Normal file
|
After Width: | Height: | Size: 859 B |
BIN
src/ui/android/res/drawable-mdpi/ic_action_switch.png
Normal file
|
After Width: | Height: | Size: 327 B |
BIN
src/ui/android/res/drawable-mdpi/ic_app.png
Normal file
|
After Width: | Height: | Size: 1.5 KiB |
BIN
src/ui/android/res/drawable-mdpi/ic_drawer.png
Normal file
|
After Width: | Height: | Size: 2.8 KiB |
BIN
src/ui/android/res/drawable-mdpi/ic_mic.png
Normal file
|
After Width: | Height: | Size: 437 B |
BIN
src/ui/android/res/drawable-mdpi/ic_search_black_24dp.png
Normal file
|
After Width: | Height: | Size: 249 B |
BIN
src/ui/android/res/drawable-mdpi/sym_keyboard_delete.png
Normal file
|
After Width: | Height: | Size: 465 B |