mirror of
https://github.com/GrammaticalFramework/gf-core.git
synced 2026-04-09 04:59:31 -06:00
Compare commits
210 Commits
remove-exa
...
js-binding
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0c91c325be | ||
|
|
ba93141317 | ||
|
|
12079550f8 | ||
|
|
1ceb8c0342 | ||
|
|
eab9fb88aa | ||
|
|
acd4a5e8cd | ||
|
|
a4b1fb03aa | ||
|
|
cb88b56016 | ||
|
|
ecf9b41db0 | ||
|
|
c5a75c482c | ||
|
|
32379a8d11 | ||
|
|
b56591c6b6 | ||
|
|
b94bb50ec9 | ||
|
|
e2395335cb | ||
|
|
2d9478b973 | ||
|
|
17e3f753fb | ||
|
|
498ad572ac | ||
|
|
bc61f8c191 | ||
|
|
d252cfd610 | ||
|
|
46a1bdc7ea | ||
|
|
18d0e1fad0 | ||
|
|
ab94e93b94 | ||
|
|
a229507392 | ||
|
|
6a9c917b29 | ||
|
|
9ba4a42426 | ||
|
|
bbd1c9147a | ||
|
|
4793d376d9 | ||
|
|
63606fd2d0 | ||
|
|
d6a1e87f4a | ||
|
|
ffcdaa921f | ||
|
|
f2e03bfc51 | ||
|
|
c89656f3ee | ||
|
|
c9b4318e9e | ||
|
|
1e43e7be4b | ||
|
|
44261b7582 | ||
|
|
b980bce334 | ||
|
|
bd7753db1a | ||
|
|
8c18d7162f | ||
|
|
ac039ec74f | ||
|
|
9f0ea19a1c | ||
|
|
8df2121650 | ||
|
|
8b9719bd2d | ||
|
|
b7249adf63 | ||
|
|
7a3efdfeb9 | ||
|
|
86066d4b12 | ||
|
|
af62a99bf5 | ||
|
|
ac1f304722 | ||
|
|
92720b92a4 | ||
|
|
078440ffbf | ||
|
|
68919a5e42 | ||
|
|
a5a019a124 | ||
|
|
61fe167392 | ||
|
|
fd29925173 | ||
|
|
bea6aa1d2d | ||
|
|
c628e11c01 | ||
|
|
61e7df4d1c | ||
|
|
de53a7c4db | ||
|
|
1e9188ea60 | ||
|
|
a55c7c7889 | ||
|
|
b3387e80e4 | ||
|
|
de0a997fcd | ||
|
|
0f53431221 | ||
|
|
099f2de5b4 | ||
|
|
2f2b39c5d2 | ||
|
|
f3d7d55752 | ||
|
|
2979864752 | ||
|
|
b11d7d93dc | ||
|
|
ba9aeb3322 | ||
|
|
8e2424af49 | ||
|
|
01b9e8da8d | ||
|
|
926a5cf414 | ||
|
|
21140fc0c0 | ||
|
|
3328279120 | ||
|
|
8cf4446e8c | ||
|
|
5b401f3880 | ||
|
|
b783299b73 | ||
|
|
0970d678cf | ||
|
|
bf17fa0bb2 | ||
|
|
0b3c278f49 | ||
|
|
c710bf0e84 | ||
|
|
eb46577f58 | ||
|
|
52f2739da1 | ||
|
|
fc37bc26cd | ||
|
|
bde1a6d586 | ||
|
|
25dc934871 | ||
|
|
2fdfef13d8 | ||
|
|
a928e4657e | ||
|
|
b6fd9a7744 | ||
|
|
64a2483b12 | ||
|
|
1d1e65185a | ||
|
|
c32cd7133f | ||
|
|
409731413e | ||
|
|
8a5e7fa25d | ||
|
|
e05c79a751 | ||
|
|
ef21d08225 | ||
|
|
f8346c4557 | ||
|
|
47ac01e4b9 | ||
|
|
a0c1da2548 | ||
|
|
951b884118 | ||
|
|
fc5c2b5a22 | ||
|
|
e4abff7725 | ||
|
|
a40130ddc4 | ||
|
|
71307d6518 | ||
|
|
fc1b51aa95 | ||
|
|
5fe963dd02 | ||
|
|
f32d222e71 | ||
|
|
a131b244df | ||
|
|
0accd97691 | ||
|
|
f8bd35543c | ||
|
|
a7b10ea936 | ||
|
|
7c97e5566d | ||
|
|
7288425daf | ||
|
|
260c0d07e0 | ||
|
|
26dabeab9b | ||
|
|
f7c2fb8a7d | ||
|
|
4bda53acb7 | ||
|
|
54204d2d95 | ||
|
|
9834b89a30 | ||
|
|
b3a2b53df2 | ||
|
|
77c0a8e100 | ||
|
|
86233e9c28 | ||
|
|
40e7544a2b | ||
|
|
61c1510620 | ||
|
|
eb22112178 | ||
|
|
083aa96e57 | ||
|
|
d82a53ebc6 | ||
|
|
5006b520d1 | ||
|
|
f78dfe80a2 | ||
|
|
44ac326da0 | ||
|
|
a8b23d52a8 | ||
|
|
d880a61857 | ||
|
|
7bd086ba19 | ||
|
|
ff0fe0a6c5 | ||
|
|
ef4df27d1b | ||
|
|
e9e2bd6b89 | ||
|
|
72a9eb0c8a | ||
|
|
b73f033b08 | ||
|
|
b974c09951 | ||
|
|
159b6ee331 | ||
|
|
3dec78c21c | ||
|
|
6ad9bf3dbf | ||
|
|
ee5ac81dfc | ||
|
|
1a842efeaf | ||
|
|
de005b9df3 | ||
|
|
52bc0f566e | ||
|
|
b509d08cbf | ||
|
|
fd0ee2756a | ||
|
|
34e89ac710 | ||
|
|
331d73b566 | ||
|
|
8d460ac402 | ||
|
|
5546c6d6da | ||
|
|
c380288db8 | ||
|
|
bd7bb9b34a | ||
|
|
18251e57a3 | ||
|
|
d06539c35c | ||
|
|
60738dda6d | ||
|
|
e628e3fe0f | ||
|
|
769743c5c0 | ||
|
|
1c75d417ee | ||
|
|
5c518de0f2 | ||
|
|
e6c2c844e9 | ||
|
|
883a7a95a1 | ||
|
|
831252eb81 | ||
|
|
fdc5659f80 | ||
|
|
4d34c7f66b | ||
|
|
f898c250ba | ||
|
|
5ef390f188 | ||
|
|
fa5c6a2949 | ||
|
|
d0bc368358 | ||
|
|
e0dca729d6 | ||
|
|
47c983c625 | ||
|
|
01f61c526f | ||
|
|
538fe5bddb | ||
|
|
f4052c8a5e | ||
|
|
e0a3b0030e | ||
|
|
b30456aa0c | ||
|
|
61c58316ea | ||
|
|
001d036a2c | ||
|
|
fe7d01f7e3 | ||
|
|
a7e43d872f | ||
|
|
d6fc50b40b | ||
|
|
9e02319b6d | ||
|
|
6278deb7a2 | ||
|
|
c6ec8cf302 | ||
|
|
07768ba4c4 | ||
|
|
e2401f32ca | ||
|
|
83abaa9b44 | ||
|
|
c7a14537c1 | ||
|
|
6352799ccb | ||
|
|
b1611eccd8 | ||
|
|
c8eb1010c5 | ||
|
|
14d35c8a31 | ||
|
|
9bf5c98509 | ||
|
|
8821f8baa8 | ||
|
|
8a45d23d63 | ||
|
|
cb0e919bf5 | ||
|
|
d1a435ad9d | ||
|
|
69ba677136 | ||
|
|
406eec6690 | ||
|
|
2f1ee094d2 | ||
|
|
390a6a04a1 | ||
|
|
37e0754cf0 | ||
|
|
a7b2f77227 | ||
|
|
9a7862ea9e | ||
|
|
914d54255f | ||
|
|
aea8548930 | ||
|
|
99dad48961 | ||
|
|
f7dc9a6eaf | ||
|
|
7867c8c828 | ||
|
|
632cd1e522 |
2
.ghci
2
.ghci
@@ -1,2 +1,2 @@
|
|||||||
:set -isrc/compiler -isrc/binary -isrc/runtime/haskell -isrc/server -isrc/server/transfer -idist/build/autogen -idist/build
|
:set -isrc/compiler -isrc/binary -isrc/runtime/haskell -isrc/server -isrc/example-based -isrc/server/transfer -idist/build/autogen -idist/build
|
||||||
:set -fwarn-unused-imports -optP-DSERVER_MODE -optP-DUSE_INTERRUPT -optP-DCC_LAZY -optP-include -optPdist/build/autogen/cabal_macros.h -odir dist/build/gf/gf-tmp -hidir dist/build/gf/gf-tmp -stubdir dist/build/gf/gf-tmp
|
:set -fwarn-unused-imports -optP-DSERVER_MODE -optP-DUSE_INTERRUPT -optP-DCC_LAZY -optP-include -optPdist/build/autogen/cabal_macros.h -odir dist/build/gf/gf-tmp -hidir dist/build/gf/gf-tmp -stubdir dist/build/gf/gf-tmp
|
||||||
|
|||||||
17
.gitignore
vendored
17
.gitignore
vendored
@@ -43,3 +43,20 @@ src/runtime/python/build/
|
|||||||
cabal.sandbox.config
|
cabal.sandbox.config
|
||||||
.stack-work
|
.stack-work
|
||||||
DATA_DIR
|
DATA_DIR
|
||||||
|
|
||||||
|
# Generated documentation (not exhaustive)
|
||||||
|
demos/index-numbers.html
|
||||||
|
demos/resourcegrammars.html
|
||||||
|
demos/translation.html
|
||||||
|
doc/tutorial/gf-tutorial.html
|
||||||
|
doc/index.html
|
||||||
|
doc/gf-bibliography.html
|
||||||
|
doc/gf-developers.html
|
||||||
|
doc/gf-editor-modes.html
|
||||||
|
doc/gf-people.html
|
||||||
|
doc/gf-refman.html
|
||||||
|
doc/gf-shell-reference.html
|
||||||
|
doc/icfp-2012.html
|
||||||
|
download/*.html
|
||||||
|
gf-book/index.html
|
||||||
|
src/www/gf-web-api.html
|
||||||
|
|||||||
3
Makefile
3
Makefile
@@ -20,6 +20,7 @@ doc:
|
|||||||
|
|
||||||
clean:
|
clean:
|
||||||
cabal clean
|
cabal clean
|
||||||
|
bash bin/clean_html
|
||||||
|
|
||||||
gf:
|
gf:
|
||||||
cabal build rgl-none
|
cabal build rgl-none
|
||||||
@@ -32,7 +33,7 @@ html::
|
|||||||
# number to the top of debian/changelog.
|
# number to the top of debian/changelog.
|
||||||
# (Tested on Ubuntu 15.04. You need to install dpkg-dev & debhelper.)
|
# (Tested on Ubuntu 15.04. You need to install dpkg-dev & debhelper.)
|
||||||
deb:
|
deb:
|
||||||
dpkg-buildpackage -b
|
dpkg-buildpackage -b -uc
|
||||||
|
|
||||||
# Make an OS X Installer package
|
# Make an OS X Installer package
|
||||||
pkg:
|
pkg:
|
||||||
|
|||||||
@@ -37,13 +37,6 @@ The simplest way of installing GF is with the command:
|
|||||||
cabal install
|
cabal install
|
||||||
```
|
```
|
||||||
|
|
||||||
This can be broken down into the usual sub-steps:
|
|
||||||
```
|
|
||||||
cabal configure
|
|
||||||
cabal build
|
|
||||||
cabal copy
|
|
||||||
```
|
|
||||||
|
|
||||||
For more details, see the [download page](http://www.grammaticalframework.org/download/index.html)
|
For more details, see the [download page](http://www.grammaticalframework.org/download/index.html)
|
||||||
and [developers manual](http://www.grammaticalframework.org/doc/gf-developers.html).
|
and [developers manual](http://www.grammaticalframework.org/doc/gf-developers.html).
|
||||||
|
|
||||||
|
|||||||
5
Setup.hs
5
Setup.hs
@@ -1,3 +1,4 @@
|
|||||||
|
import Distribution.System(Platform(..),OS(..))
|
||||||
import Distribution.Simple(defaultMainWithHooks,UserHooks(..),simpleUserHooks)
|
import Distribution.Simple(defaultMainWithHooks,UserHooks(..),simpleUserHooks)
|
||||||
import Distribution.Simple.LocalBuildInfo(LocalBuildInfo(..),absoluteInstallDirs,datadir)
|
import Distribution.Simple.LocalBuildInfo(LocalBuildInfo(..),absoluteInstallDirs,datadir)
|
||||||
import Distribution.Simple.Setup(BuildFlags(..),Flag(..),InstallFlags(..),CopyDest(..),CopyFlags(..),SDistFlags(..))
|
import Distribution.Simple.Setup(BuildFlags(..),Flag(..),InstallFlags(..),CopyDest(..),CopyFlags(..),SDistFlags(..))
|
||||||
@@ -73,5 +74,9 @@ dataDirFile = "DATA_DIR"
|
|||||||
default_gf :: LocalBuildInfo -> FilePath
|
default_gf :: LocalBuildInfo -> FilePath
|
||||||
default_gf lbi = buildDir lbi </> exeName' </> exeNameReal
|
default_gf lbi = buildDir lbi </> exeName' </> exeNameReal
|
||||||
where
|
where
|
||||||
|
-- shadows Distribution.Simple.BuildPaths.exeExtension, which changed type signature in Cabal 2.4
|
||||||
|
exeExtension = case hostPlatform lbi of
|
||||||
|
Platform arch Windows -> "exe"
|
||||||
|
_ -> ""
|
||||||
exeName' = "gf"
|
exeName' = "gf"
|
||||||
exeNameReal = exeName' <.> exeExtension
|
exeNameReal = exeName' <.> exeExtension
|
||||||
|
|||||||
@@ -104,9 +104,10 @@ setupWeb dest (pkg,lbi) = do
|
|||||||
copy_pgf (pgf,subdir,_) =
|
copy_pgf (pgf,subdir,_) =
|
||||||
do let src = gfo_dir </> pgf
|
do let src = gfo_dir </> pgf
|
||||||
let dst = grammars_dir </> pgf
|
let dst = grammars_dir </> pgf
|
||||||
putStrLn $ "Installing "++dst
|
|
||||||
ex <- doesFileExist src
|
ex <- doesFileExist src
|
||||||
if ex then copyFile src dst else return ()
|
if ex then do putStrLn $ "Installing "++dst
|
||||||
|
copyFile src dst
|
||||||
|
else putStrLn $ "Not installing "++dst
|
||||||
|
|
||||||
gf_logo = "gf0.png"
|
gf_logo = "gf0.png"
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
|
|||||||
|
|
||||||
### This script builds a binary distribution of GF from the source
|
### This script builds a binary distribution of GF from the source
|
||||||
### package that this script is a part of. It assumes that you have installed
|
### package that this script is a part of. It assumes that you have installed
|
||||||
### the Haskell Platform, version 2013.2.0.0 or 2012.4.0.0.
|
### a recent version of the Haskell Platform.
|
||||||
### Two binary package formats are supported: plain tar files (.tar.gz) and
|
### Two binary package formats are supported: plain tar files (.tar.gz) and
|
||||||
### OS X Installer packages (.pkg).
|
### OS X Installer packages (.pkg).
|
||||||
|
|
||||||
@@ -16,13 +16,14 @@ name="gf-$ver"
|
|||||||
destdir="$PWD/dist/$name" # assemble binary dist here
|
destdir="$PWD/dist/$name" # assemble binary dist here
|
||||||
prefix=${PREFIX:-/usr/local} # where to install
|
prefix=${PREFIX:-/usr/local} # where to install
|
||||||
fmt=${FMT:-tar.gz} # binary package format (tar.gz or pkg)
|
fmt=${FMT:-tar.gz} # binary package format (tar.gz or pkg)
|
||||||
|
ghc=${GHC:-ghc} # which Haskell compiler to use
|
||||||
|
|
||||||
extralib="$destdir$prefix/lib"
|
extralib="$destdir$prefix/lib"
|
||||||
extrainclude="$destdir$prefix/include"
|
extrainclude="$destdir$prefix/include"
|
||||||
extra="--extra-lib-dirs=$extralib --extra-include-dirs=$extrainclude"
|
extra="--extra-lib-dirs=$extralib --extra-include-dirs=$extrainclude"
|
||||||
|
|
||||||
set -e # Stop if an error occurs
|
set -e # Stop if an error occurs
|
||||||
set -x # print commands before exuting them
|
set -x # print commands before executing them
|
||||||
|
|
||||||
## First configure & build the C run-time system
|
## First configure & build the C run-time system
|
||||||
pushd src/runtime/c
|
pushd src/runtime/c
|
||||||
@@ -64,8 +65,8 @@ else
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
## Build GF, with C run-time support enabled
|
## Build GF, with C run-time support enabled
|
||||||
cabal install --only-dependencies -fserver -fc-runtime $extra
|
cabal install -w "$ghc" --only-dependencies -fserver -fc-runtime $extra
|
||||||
cabal configure --prefix="$prefix" -fserver -fc-runtime $extra
|
cabal configure -w "$ghc" --prefix="$prefix" -fserver -fc-runtime $extra
|
||||||
DYLD_LIBRARY_PATH="$extralib" LD_LIBRARY_PATH="$extralib" cabal build
|
DYLD_LIBRARY_PATH="$extralib" LD_LIBRARY_PATH="$extralib" cabal build
|
||||||
# Building the example grammars will fail, because the RGL is missing
|
# Building the example grammars will fail, because the RGL is missing
|
||||||
cabal copy --destdir="$destdir" # create www directory
|
cabal copy --destdir="$destdir" # create www directory
|
||||||
|
|||||||
20
bin/clean_html
Executable file
20
bin/clean_html
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# This script finds all .t2t (txt2tags) and .md (Markdown) files
|
||||||
|
# and deletes the corresponding HTML file of the same name.
|
||||||
|
|
||||||
|
find . -name '*.t2t' | while read t2t ; do
|
||||||
|
html="${t2t%.t2t}.html"
|
||||||
|
if [ -f "$html" ] ; then
|
||||||
|
echo "$html"
|
||||||
|
rm -f "$html"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
find . -name '*.md' | while read md ; do
|
||||||
|
html="${md%.md}.html"
|
||||||
|
if [ -f "$html" ] ; then
|
||||||
|
echo "$html"
|
||||||
|
rm -f "$html"
|
||||||
|
fi
|
||||||
|
done
|
||||||
146
bin/template.html
Normal file
146
bin/template.html
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
<!DOCTYPE html>
|
||||||
|
<html xmlns="http://www.w3.org/1999/xhtml" lang="$lang$" xml:lang="$lang$"$if(dir)$ dir="$dir$"$endif$>
|
||||||
|
<head>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
$for(author-meta)$
|
||||||
|
<meta name="author" content="$author-meta$" />
|
||||||
|
$endfor$
|
||||||
|
$if(date-meta)$
|
||||||
|
<meta name="dcterms.date" content="$date-meta$" />
|
||||||
|
$endif$
|
||||||
|
$if(keywords)$
|
||||||
|
<meta name="keywords" content="$for(keywords)$$keywords$$sep$, $endfor$" />
|
||||||
|
$endif$
|
||||||
|
<title>$if(title-prefix)$$title-prefix$ – $endif$$pagetitle$</title>
|
||||||
|
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
|
||||||
|
<link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.4.2/css/all.css" integrity="sha384-/rXc/GQVaYpyDdyxK+ecHPVYJSN9bmVFBvjA/9eOB+pb3F2w2N6fc5qB9Ew5yIns" crossorigin="anonymous">
|
||||||
|
$for(css)$
|
||||||
|
<link rel="stylesheet" href="$css$" />
|
||||||
|
$endfor$
|
||||||
|
$if(math)$
|
||||||
|
$math$
|
||||||
|
$endif$
|
||||||
|
<!--[if lt IE 9]>
|
||||||
|
<script src="//cdnjs.cloudflare.com/ajax/libs/html5shiv/3.7.3/html5shiv-printshiv.min.js"></script>
|
||||||
|
<![endif]-->
|
||||||
|
$for(header-includes)$
|
||||||
|
$header-includes$
|
||||||
|
$endfor$
|
||||||
|
</head>
|
||||||
|
<body class="bg-light">
|
||||||
|
<div class="bg-white pb-5">
|
||||||
|
$for(include-before)$
|
||||||
|
$include-before$
|
||||||
|
$endfor$
|
||||||
|
<div class="container-fluid py-5" style="max-width:1200px">
|
||||||
|
|
||||||
|
$if(title)$
|
||||||
|
<header id="title-block-header">
|
||||||
|
<a href="$rel-root$" title="Home">
|
||||||
|
<img src="$rel-root$/doc/Logos/gf1.svg" height="200" class="float-md-right ml-3 mb-3 bg-white" alt="GF Logo">
|
||||||
|
</a>
|
||||||
|
<h1 class="title">$title$</h1>
|
||||||
|
$if(subtitle)$
|
||||||
|
<p class="subtitle">$subtitle$</p>
|
||||||
|
$endif$
|
||||||
|
$for(author)$
|
||||||
|
<p class="author">$author$</p>
|
||||||
|
$endfor$
|
||||||
|
$if(date)$
|
||||||
|
<p class="date">$date$</p>
|
||||||
|
$endif$
|
||||||
|
</header>
|
||||||
|
$endif$
|
||||||
|
$if(toc)$
|
||||||
|
<nav id="$idprefix$TOC">
|
||||||
|
$if(table-of-contents)$
|
||||||
|
<!-- pandoc >= 2.0 -->
|
||||||
|
$table-of-contents$
|
||||||
|
$else$
|
||||||
|
<!-- pandoc < 2.0 -->
|
||||||
|
$toc$
|
||||||
|
$endif$
|
||||||
|
</nav>
|
||||||
|
$endif$
|
||||||
|
$body$
|
||||||
|
</div><!-- .container -->
|
||||||
|
</div><!-- .bg-white -->
|
||||||
|
|
||||||
|
<footer class="py-5">
|
||||||
|
<div class="container">
|
||||||
|
<div class="row">
|
||||||
|
|
||||||
|
<div class="col-6 col-sm-3">
|
||||||
|
<a href="$rel-root$">
|
||||||
|
<i class="fas fa-home"></i>
|
||||||
|
Home
|
||||||
|
</a>
|
||||||
|
<h6 class="text-muted mt-3">Get started</h6>
|
||||||
|
<ul class="list-unstyled">
|
||||||
|
<li><a href="https://www.youtube.com/watch?v=x1LFbDQhbso">Google Tech Talk</a></li>
|
||||||
|
<li><a href="http://cloud.grammaticalframework.org/">GF Cloud</a></li>
|
||||||
|
<li>
|
||||||
|
<a href="$rel-root$/doc/tutorial/gf-tutorial.html">Tutorial</a>
|
||||||
|
/
|
||||||
|
<a href="$rel-root$/lib/doc/rgl-tutorial/index.html">RGL Tutorial</a>
|
||||||
|
</li>
|
||||||
|
<li><a href="$rel-root$/download"><strong>Download GF</strong></a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-6 col-sm-3">
|
||||||
|
<h6 class="text-muted">Learn more</h6>
|
||||||
|
<ul class="list-unstyled">
|
||||||
|
<li><a href="$rel-root$/gf-book">The GF Book</a></li>
|
||||||
|
<li><a href="$rel-root$/doc/gf-refman.html">Reference Manual</a></li>
|
||||||
|
<li><a href="$rel-root$/doc/gf-shell-reference.html">GF Shell Reference</a></li>
|
||||||
|
<li><a href="http://www.molto-project.eu/sites/default/files/MOLTO_D2.3.pdf">Best Practices</a></li>
|
||||||
|
<li><a href="$rel-root$/lib/doc/synopsis/index.html"><strong>RGL Synopsis</strong></a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-6 col-sm-3">
|
||||||
|
<h6 class="text-muted">Develop</h6>
|
||||||
|
<ul class="list-unstyled">
|
||||||
|
<li><a href="$rel-root$/doc/gf-developers.html">Developers Guide</a></li>
|
||||||
|
<li><a href="http://hackage.haskell.org/package/gf/docs/PGF.html">PGF library API (Haskell runtime)</a></li>
|
||||||
|
<li><a href="$rel-root$/doc/runtime-api.html">PGF library API (C runtime)</a></li>
|
||||||
|
<li><a href="http://hackage.haskell.org/package/gf/docs/GF.html">GF compiler API</a></li>
|
||||||
|
<li><a href="$rel-root$/doc/gf-editor-modes.html">Text Editor Support</a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-6 col-sm-3">
|
||||||
|
<h6 class="text-muted">Contribute</h6>
|
||||||
|
<ul class="list-unstyled">
|
||||||
|
<li><a href="http://groups.google.com/group/gf-dev">Mailing List</a></li>
|
||||||
|
<li><a href="https://github.com/GrammaticalFramework/gf-core/issues">Issue Tracker</a></li>
|
||||||
|
<li><a href="$rel-root$/doc/gf-people.html">Authors</a></li>
|
||||||
|
<li><a href="http://school.grammaticalframework.org/2018/">Summer School</a></li>
|
||||||
|
</ul>
|
||||||
|
<h6 class="text-muted">
|
||||||
|
Repositories
|
||||||
|
<i class="fab fa-github ml-1"></i>
|
||||||
|
</h6>
|
||||||
|
<a href="https://github.com/GrammaticalFramework/gf-core">GF</a> ·
|
||||||
|
<a href="https://github.com/GrammaticalFramework/gf-rgl">RGL</a> ·
|
||||||
|
<a href="https://github.com/GrammaticalFramework/gf-contrib">Contributions</a>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
$for(include-after)$
|
||||||
|
$include-after$
|
||||||
|
$endfor$
|
||||||
|
<script type="text/javascript">
|
||||||
|
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
|
||||||
|
document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
|
||||||
|
</script>
|
||||||
|
<script type="text/javascript">
|
||||||
|
try {
|
||||||
|
var pageTracker = _gat._getTracker("UA-7811807-3");
|
||||||
|
pageTracker._trackPageview();
|
||||||
|
} catch(err) {}</script>
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
159
bin/update_html
159
bin/update_html
@@ -1,11 +1,156 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
### This script finds all .t2t (txt2tags) files and updates the corresponding
|
# Generate HTML from txt2tags (.t2t) and Markdown (.md)
|
||||||
### .html file, if it is out-of-date.
|
# Usage:
|
||||||
|
# - update_html
|
||||||
|
# Look for all .t2t and .md files in the current directory and below,
|
||||||
|
# generating the output HTML when the source is newer than the HTML.
|
||||||
|
# - update_html path/to/file.t2t path/to/another.md
|
||||||
|
# Generate HTML for the specified file(s), ignoring modification time.
|
||||||
|
#
|
||||||
|
# Requires:
|
||||||
|
# - txt2tags for .t2t files. Tested with 2.6.
|
||||||
|
# - pandoc for both .t2t and .md files. Tested with 1.16.0.2 and 2.3.1.
|
||||||
|
# - the template file `template.html` in the same directory as this script.
|
||||||
|
#
|
||||||
|
# Tested with Ubuntu 16.04 and macOS Mojave.
|
||||||
|
#
|
||||||
|
# See also clean_html for removing the files generated by this script.
|
||||||
|
|
||||||
find . -name '*.t2t' | while read t2t ; do
|
# Path to directory where this script is
|
||||||
html="${t2t%.t2t}.html"
|
# https://stackoverflow.com/a/246128/98600
|
||||||
if [ "$t2t" -nt "$html" ] ; then
|
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
|
||||||
txt2tags -thtml "$t2t"
|
|
||||||
|
# HTML template
|
||||||
|
template="$DIR/template.html"
|
||||||
|
|
||||||
|
# Render txt2tags into html file
|
||||||
|
# Arguments:
|
||||||
|
# 1. txt2tags source file, e.g. download/index.t2t
|
||||||
|
# 2. html target file, e.g. download/index.html
|
||||||
|
function render_t2t_html {
|
||||||
|
t2t="$1"
|
||||||
|
html="$2"
|
||||||
|
tmp="$2.tmp"
|
||||||
|
relroot="$( dirname $t2t | sed -E 's/^.\///' | sed -E 's/[^/]+/../g' )"
|
||||||
|
|
||||||
|
# First render with txt2tags to handle pre/post processing
|
||||||
|
txt2tags \
|
||||||
|
--target=html \
|
||||||
|
--no-headers \
|
||||||
|
--quiet \
|
||||||
|
--outfile="$tmp" \
|
||||||
|
--infile="$t2t"
|
||||||
|
|
||||||
|
# Replace <A NAME="toc3"></A> with <div id="toc3"></div> so that Pandoc retains it
|
||||||
|
# Do this for both cases since BSD sed doesn't support /i
|
||||||
|
sed -i.bak "s/<a name=\"\(.*\)\"><\/a>/<div id=\"\1\"><\/div>/" "$tmp"
|
||||||
|
sed -i.bak "s/<A NAME=\"\(.*\)\"><\/A>/<div id=\"\1\"><\/div>/" "$tmp"
|
||||||
|
rm -f "$tmp.bak"
|
||||||
|
|
||||||
|
# Capture first 3 lines of t2t file: title, author, date
|
||||||
|
# Documentation here: https://txt2tags.org/userguide/headerarea
|
||||||
|
l1=$(head -n 1 "$t2t")
|
||||||
|
l2=$(tail -n+2 "$t2t" | head -n 1)
|
||||||
|
l3=$(tail -n+3 "$t2t" | head -n 1)
|
||||||
|
title=
|
||||||
|
author=
|
||||||
|
date=
|
||||||
|
if [ -n "$l1" ] ; then
|
||||||
|
title="$l1"
|
||||||
|
if [ -n "$l2" ] ; then author="$l2" ; fi
|
||||||
|
if [ -n "$l3" ] ; then date="$l3" ; fi
|
||||||
fi
|
fi
|
||||||
done
|
|
||||||
|
# Run txt2tag's HTML through Pandoc for cleanup
|
||||||
|
pandoc \
|
||||||
|
--from=html \
|
||||||
|
--to=html5 \
|
||||||
|
--standalone \
|
||||||
|
--template="$template" \
|
||||||
|
--variable="lang:en" \
|
||||||
|
--variable="rel-root:$relroot" \
|
||||||
|
--metadata="title:$title" \
|
||||||
|
--metadata="author:$author" \
|
||||||
|
--metadata="date:$date" \
|
||||||
|
"$tmp" \
|
||||||
|
--output="$html"
|
||||||
|
rm -f "$tmp"
|
||||||
|
|
||||||
|
# Final post-processing
|
||||||
|
if [ -f "$html" ] ; then
|
||||||
|
sed -i.bak "s/<table/<table class=\"table\"/" "$html" && rm "$html.bak"
|
||||||
|
echo "$html"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Render markdown into html file
|
||||||
|
# Arguments:
|
||||||
|
# 1. markdown source file, e.g. download/index.md
|
||||||
|
# 2. html target file, e.g. download/index.html
|
||||||
|
function render_md_html {
|
||||||
|
md="$1"
|
||||||
|
html="$2"
|
||||||
|
relroot="$( dirname $md | sed -E 's/^.\///' | sed -E 's/[^/]+/../g' )"
|
||||||
|
|
||||||
|
# Look for `show-toc: true` in metadata (first ten lines of file)
|
||||||
|
if head -n 10 "$md" | grep --quiet 'show-toc: true' ; then
|
||||||
|
tocflag='--table-of-contents'
|
||||||
|
else
|
||||||
|
tocflag=''
|
||||||
|
fi
|
||||||
|
|
||||||
|
pandoc \
|
||||||
|
--from=markdown \
|
||||||
|
--to=html5 \
|
||||||
|
--standalone \
|
||||||
|
$tocflag \
|
||||||
|
--template="$template" \
|
||||||
|
--variable="lang:en" \
|
||||||
|
--variable="rel-root:$relroot" \
|
||||||
|
"$md" \
|
||||||
|
--output="$html"
|
||||||
|
|
||||||
|
# Final post-processing
|
||||||
|
if [ -f "$html" ] ; then
|
||||||
|
# add "table" class to tables
|
||||||
|
sed -i.bak "s/<table/<table class=\"table\"/" "$html"
|
||||||
|
# rewrite anchors that Pandoc 1.16 ignores: [content]{#anchor} -> <span id="anchor">content</span>
|
||||||
|
sed -i.bak -E "s/\[(.*)\]\{#(.+)\}/<span id=\"\2\">\1<\/span>/" "$html"
|
||||||
|
rm -f "$html.bak"
|
||||||
|
echo "$html"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Main entry point
|
||||||
|
# Script can be run in one of two modes:
|
||||||
|
if [ $# -gt 0 ] ; then
|
||||||
|
# Render specific file(s) from args, ignoring dates
|
||||||
|
for file in "$@" ; do
|
||||||
|
ext="${file##*.}"
|
||||||
|
html="${file%.$ext}.html"
|
||||||
|
case $ext in
|
||||||
|
"md")
|
||||||
|
render_md_html "$file" "$html"
|
||||||
|
;;
|
||||||
|
"t2t")
|
||||||
|
render_t2t_html "$file" "$html"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
else
|
||||||
|
# Render all files found in cwd and deeper if source is newer
|
||||||
|
find . -name '*.t2t' | while read file ; do
|
||||||
|
html="${file%.t2t}.html"
|
||||||
|
if [ "$file" -nt "$html" ] || [ "$template" -nt "$html" ] ; then
|
||||||
|
render_t2t_html "$file" "$html"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
find . -name '*.md' | while read file ; do
|
||||||
|
if [[ "$file" == *"README.md" ]] ; then continue ; fi
|
||||||
|
html="${file%.md}.html"
|
||||||
|
if [ "$file" -nt "$html" ] || [ "$template" -nt "$html" ] ; then
|
||||||
|
render_md_html "$file" "$html"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|||||||
18
debian/changelog
vendored
18
debian/changelog
vendored
@@ -1,3 +1,21 @@
|
|||||||
|
gf (3.10.3-1) xenial bionic cosmic; urgency=low
|
||||||
|
|
||||||
|
* GF 3.10.3
|
||||||
|
|
||||||
|
-- Thomas Hallgren <hallgren@chalmers.se> Fri, 5 Mar 2019 19:30:00 +0100
|
||||||
|
|
||||||
|
gf (3.10-2) xenial bionic cosmic; urgency=low
|
||||||
|
|
||||||
|
* GF 3.10
|
||||||
|
|
||||||
|
-- Thomas Hallgren <hallgren@chalmers.se> Fri, 5 Mar 2019 16:00:00 +0100
|
||||||
|
|
||||||
|
gf (3.10-1) xenial bionic cosmic; urgency=low
|
||||||
|
|
||||||
|
* GF 3.10
|
||||||
|
|
||||||
|
-- Thomas Hallgren <hallgren@chalmers.se> Fri, 2 Dec 2018 15:00:00 +0100
|
||||||
|
|
||||||
gf (3.9-1) vivid xenial zesty; urgency=low
|
gf (3.9-1) vivid xenial zesty; urgency=low
|
||||||
|
|
||||||
* GF 3.9
|
* GF 3.9
|
||||||
|
|||||||
2
debian/control
vendored
2
debian/control
vendored
@@ -3,7 +3,7 @@ Section: devel
|
|||||||
Priority: optional
|
Priority: optional
|
||||||
Maintainer: Thomas Hallgren <hallgren@chalmers.se>
|
Maintainer: Thomas Hallgren <hallgren@chalmers.se>
|
||||||
Standards-Version: 3.9.2
|
Standards-Version: 3.9.2
|
||||||
Build-Depends: debhelper (>= 5), haskell-platform (>= 2011.2.0.1), libghc-haskeline-dev, libghc-mtl-dev, libghc-json-dev, autoconf, automake, libtool-bin, python-dev, java-sdk, txt2tags
|
Build-Depends: debhelper (>= 5), haskell-platform (>= 2011.2.0.1), libghc-haskeline-dev, libghc-mtl-dev, libghc-json-dev, autoconf, automake, libtool-bin, python-dev, java-sdk, txt2tags, pandoc
|
||||||
Homepage: http://www.grammaticalframework.org/
|
Homepage: http://www.grammaticalframework.org/
|
||||||
|
|
||||||
Package: gf
|
Package: gf
|
||||||
|
|||||||
33
debian/rules
vendored
Normal file → Executable file
33
debian/rules
vendored
Normal file → Executable file
@@ -13,19 +13,6 @@
|
|||||||
override_dh_shlibdeps:
|
override_dh_shlibdeps:
|
||||||
dh_shlibdeps --dpkg-shlibdeps-params=--ignore-missing-info
|
dh_shlibdeps --dpkg-shlibdeps-params=--ignore-missing-info
|
||||||
|
|
||||||
override_dh_auto_build:
|
|
||||||
cd src/runtime/python && EXTRA_INCLUDE_DIRS=$(CURDIR)/src/runtime/c EXTRA_LIB_DIRS=$(CURDIR)/src/runtime/c/.libs python setup.py build
|
|
||||||
cd src/runtime/java && make CFLAGS="-I$(CURDIR)/src/runtime/c -L$(CURDIR)/src/runtime/c/.libs" INSTALL_PATH=/usr/lib
|
|
||||||
echo LD_LIBRARY_PATH=$$LD_LIBRARY_PATH:$(CURDIR)/src/runtime/c/.libs
|
|
||||||
LD_LIBRARY_PATH=$$LD_LIBRARY_PATH:$(CURDIR)/src/runtime/c/.libs cabal build
|
|
||||||
make html
|
|
||||||
|
|
||||||
override_dh_auto_clean:
|
|
||||||
rm -fr dist/build
|
|
||||||
-cd src/runtime/python && rm -fr build
|
|
||||||
-cd src/runtime/java && make clean
|
|
||||||
-cd src/runtime/c && make clean
|
|
||||||
|
|
||||||
override_dh_auto_configure:
|
override_dh_auto_configure:
|
||||||
cd src/runtime/c && bash setup.sh configure --prefix=/usr
|
cd src/runtime/c && bash setup.sh configure --prefix=/usr
|
||||||
cd src/runtime/c && bash setup.sh build
|
cd src/runtime/c && bash setup.sh build
|
||||||
@@ -33,13 +20,31 @@ override_dh_auto_configure:
|
|||||||
cabal install --only-dependencies
|
cabal install --only-dependencies
|
||||||
cabal configure --prefix=/usr -fserver -fc-runtime --extra-lib-dirs=$(CURDIR)/src/runtime/c/.libs --extra-include-dirs=$(CURDIR)/src/runtime/c
|
cabal configure --prefix=/usr -fserver -fc-runtime --extra-lib-dirs=$(CURDIR)/src/runtime/c/.libs --extra-include-dirs=$(CURDIR)/src/runtime/c
|
||||||
|
|
||||||
|
SET_LDL=LD_LIBRARY_PATH=$$LD_LIBRARY_PATH:$(CURDIR)/src/runtime/c/.libs
|
||||||
|
|
||||||
|
override_dh_auto_build:
|
||||||
|
cd src/runtime/python && EXTRA_INCLUDE_DIRS=$(CURDIR)/src/runtime/c EXTRA_LIB_DIRS=$(CURDIR)/src/runtime/c/.libs python setup.py build
|
||||||
|
cd src/runtime/java && make CFLAGS="-I$(CURDIR)/src/runtime/c -L$(CURDIR)/src/runtime/c/.libs" INSTALL_PATH=/usr/lib
|
||||||
|
echo $(SET_LDL)
|
||||||
|
$(SET_LDL) cabal build # builds gf, fails to build example grammars
|
||||||
|
PATH=$(CURDIR)/dist/build/gf:$$PATH && make -C ../gf-rgl build
|
||||||
|
GF_LIB_PATH=$(CURDIR)/../gf-rgl/dist $(SET_LDL) cabal build # have RGL now, ok to build example grammars
|
||||||
|
make html
|
||||||
|
|
||||||
override_dh_auto_install:
|
override_dh_auto_install:
|
||||||
LD_LIBRARY_PATH=$$LD_LIBRARY_PATH:$(CURDIR)/src/runtime/c/.libs cabal copy --destdir=$(CURDIR)/debian/gf
|
$(SET_LDL) cabal copy --destdir=$(CURDIR)/debian/gf # creates www directory
|
||||||
|
export GF_LIB_PATH="$$(dirname $$(find "$(CURDIR)/debian/gf" -name www))/lib" && echo "GF_LIB_PATH=$$GF_LIB_PATH" && mkdir -p "$$GF_LIB_PATH" && make -C ../gf-rgl copy
|
||||||
cd src/runtime/c && bash setup.sh copy prefix=$(CURDIR)/debian/gf/usr
|
cd src/runtime/c && bash setup.sh copy prefix=$(CURDIR)/debian/gf/usr
|
||||||
cd src/runtime/python && python setup.py install --prefix=$(CURDIR)/debian/gf/usr
|
cd src/runtime/python && python setup.py install --prefix=$(CURDIR)/debian/gf/usr
|
||||||
cd src/runtime/java && make INSTALL_PATH=$(CURDIR)/debian/gf/usr/lib install
|
cd src/runtime/java && make INSTALL_PATH=$(CURDIR)/debian/gf/usr/lib install
|
||||||
D="`find debian/gf -name site-packages`" && [ -n "$$D" ] && cd $$D && cd .. && mv site-packages dist-packages
|
D="`find debian/gf -name site-packages`" && [ -n "$$D" ] && cd $$D && cd .. && mv site-packages dist-packages
|
||||||
|
|
||||||
|
override_dh_auto_clean:
|
||||||
|
rm -fr dist/build
|
||||||
|
-cd src/runtime/python && rm -fr build
|
||||||
|
-cd src/runtime/java && make clean
|
||||||
|
-cd src/runtime/c && make clean
|
||||||
|
|
||||||
override_dh_auto_test:
|
override_dh_auto_test:
|
||||||
ifneq (nocheck,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
|
ifneq (nocheck,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
|
||||||
true
|
true
|
||||||
|
|||||||
15
doc/Makefile
15
doc/Makefile
@@ -1,18 +1,3 @@
|
|||||||
resource:
|
|
||||||
gfdoc -txt2 ../lib/resource-1.0/abstract/*.gf
|
|
||||||
gfdoc -txt2 ../lib/resource-1.0/*/Paradigms*.gf
|
|
||||||
txt2tags --toc resource.txt
|
|
||||||
# cat resource-preamble resource.tex >final-resource.tex
|
|
||||||
sed -i 's/\\docum/%\\docum/g' resource.tex
|
|
||||||
sed -i 's/ion\*{/ion{/g' resource.tex
|
|
||||||
sed -i 's/\\paragraph{}//g' resource.tex
|
|
||||||
sed -i 's/}\\\\/}/g' resource.tex
|
|
||||||
cat resource-preamble resource.tex >resource.tmp
|
|
||||||
mv resource.tmp resource.tex
|
|
||||||
latex resource.tex
|
|
||||||
latex resource.tex
|
|
||||||
dvipdf resource.dvi
|
|
||||||
|
|
||||||
gf-help-full.txt::
|
gf-help-full.txt::
|
||||||
{ echo ; echo ; echo ; } > $@
|
{ echo ; echo ; echo ; } > $@
|
||||||
echo help -full -t2t | gf -run >> $@
|
echo help -full -t2t | gf -run >> $@
|
||||||
|
|||||||
551
doc/error-messages.txt
Normal file
551
doc/error-messages.txt
Normal file
@@ -0,0 +1,551 @@
|
|||||||
|
Compiler.hs
|
||||||
|
mainGFC :: Options -> [FilePath] -> IO ()
|
||||||
|
_ | null fs -> fail $ "No input files."
|
||||||
|
_ | all (extensionIs ".pgf") fs -> unionPGFFiles opts fs
|
||||||
|
_ -> fail $ "Don't know what to do with these input files: " ++ unwords fs)
|
||||||
|
|
||||||
|
|
||||||
|
----------------------------------------
|
||||||
|
Compile.hs
|
||||||
|
|
||||||
|
compileModule
|
||||||
|
case length file1s of
|
||||||
|
0 -> raise (render ("Unable to find: " $$ nest 2 candidates))
|
||||||
|
1 -> do return $ head file1s
|
||||||
|
_ -> do putIfVerb opts1 ("matched multiple candidates: " +++ show file1s)
|
||||||
|
return $ head file1s
|
||||||
|
else raise (render ("File" <+> file <+> "does not exist"))
|
||||||
|
|
||||||
|
---------------------------------------
|
||||||
|
Grammar.Lexer.x
|
||||||
|
token :: P Token
|
||||||
|
AlexError (AI pos _ _) -> PFailed pos "lexical error"
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------
|
||||||
|
Grammar.Parser.y
|
||||||
|
|
||||||
|
happyError = fail "syntax error"
|
||||||
|
|
||||||
|
tryLoc (c,mty,Just e) = return (c,(mty,e))
|
||||||
|
tryLoc (c,_ ,_ ) = fail ("local definition of" +++ showIdent c +++ "without value")
|
||||||
|
|
||||||
|
mkR [] = return $ RecType [] --- empty record always interpreted as record type
|
||||||
|
mkR fs@(f:_) =
|
||||||
|
case f of
|
||||||
|
(lab,Just ty,Nothing) -> mapM tryRT fs >>= return . RecType
|
||||||
|
_ -> mapM tryR fs >>= return . R
|
||||||
|
where
|
||||||
|
tryRT (lab,Just ty,Nothing) = return (ident2label lab,ty)
|
||||||
|
tryRT (lab,_ ,_ ) = fail $ "illegal record type field" +++ showIdent lab --- manifest fields ?!
|
||||||
|
|
||||||
|
tryR (lab,mty,Just t) = return (ident2label lab,(mty,t))
|
||||||
|
tryR (lab,_ ,_ ) = fail $ "illegal record field" +++ showIdent lab
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------
|
||||||
|
ModDeps.hs
|
||||||
|
|
||||||
|
mkSourceGrammar :: [SourceModule] -> Err SourceGrammar
|
||||||
|
deplist <- either
|
||||||
|
return
|
||||||
|
(\ms -> Bad $ "circular modules" +++ unwords (map show ms)) $
|
||||||
|
|
||||||
|
|
||||||
|
checkUniqueImportNames :: [Ident] -> SourceModInfo -> Err ()
|
||||||
|
test ms = testErr (all (`notElem` ns) ms)
|
||||||
|
("import names clashing with module names among" +++ unwords (map prt ms))
|
||||||
|
|
||||||
|
|
||||||
|
moduleDeps :: [SourceModule] -> Err Dependencies
|
||||||
|
deps (c,m) = errIn ("checking dependencies of module" +++ prt c) $ case mtype m of
|
||||||
|
MTConcrete a -> do
|
||||||
|
am <- lookupModuleType gr a
|
||||||
|
testErr (mtype am == MTAbstract) "the of-module is not an abstract syntax"
|
||||||
|
|
||||||
|
testErr (all (compatMType ety . mtype) ests) "inappropriate extension module type"
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------
|
||||||
|
Update.hs
|
||||||
|
|
||||||
|
buildAnyTree
|
||||||
|
Just i -> case unifyAnyInfo m i j of
|
||||||
|
Ok k -> go (Map.insert c k map) is
|
||||||
|
Bad _ -> fail $ render ("conflicting information in module"<+>m $$
|
||||||
|
nest 4 (ppJudgement Qualified (c,i)) $$
|
||||||
|
"and" $+$
|
||||||
|
nest 4 (ppJudgement Qualified (c,j)))
|
||||||
|
extendModule
|
||||||
|
unless (sameMType (mtype m) (mtype mo))
|
||||||
|
(checkError ("illegal extension type to module" <+> name))
|
||||||
|
|
||||||
|
rebuildModule
|
||||||
|
unless (null is || mstatus mi == MSIncomplete)
|
||||||
|
(checkError ("module" <+> i <+>
|
||||||
|
"has open interfaces and must therefore be declared incomplete"))
|
||||||
|
|
||||||
|
unless (isModRes m1)
|
||||||
|
(checkError ("interface expected instead of" <+> i0))
|
||||||
|
js' <- extendMod gr False ((i0,m1), isInherited mincl) i (jments mi)
|
||||||
|
|
||||||
|
unless (stat' == MSComplete || stat == MSIncomplete)
|
||||||
|
(checkError ("module" <+> i <+> "remains incomplete"))
|
||||||
|
|
||||||
|
|
||||||
|
extendMod
|
||||||
|
checkError ("cannot unify the information" $$
|
||||||
|
nest 4 (ppJudgement Qualified (c,i)) $$
|
||||||
|
"in module" <+> name <+> "with" $$
|
||||||
|
nest 4 (ppJudgement Qualified (c,j)) $$
|
||||||
|
"in module" <+> base)
|
||||||
|
|
||||||
|
unifyAnyInfo
|
||||||
|
(ResValue (L l1 t1), ResValue (L l2 t2))
|
||||||
|
| t1==t2 -> return (ResValue (L l1 t1))
|
||||||
|
| otherwise -> fail ""
|
||||||
|
|
||||||
|
(AnyInd b1 m1, AnyInd b2 m2) -> do
|
||||||
|
testErr (b1 == b2) $ "indirection status"
|
||||||
|
testErr (m1 == m2) $ "different sources of indirection"
|
||||||
|
|
||||||
|
unifAbsDefs _ _ = fail ""
|
||||||
|
|
||||||
|
----------------------------------
|
||||||
|
|
||||||
|
Rename.hs
|
||||||
|
|
||||||
|
renameIdentTerm'
|
||||||
|
_ -> case lookupTreeManyAll showIdent opens c of
|
||||||
|
[f] -> return (f c)
|
||||||
|
[] -> alt c ("constant not found:" <+> c $$
|
||||||
|
"given" <+> fsep (punctuate ',' (map fst qualifs)))
|
||||||
|
|
||||||
|
ts@(t:_) -> do checkWarn ("atomic term" <+> ppTerm Qualified 0 t0 $$
|
||||||
|
"conflict" <+> hsep (punctuate ',' (map (ppTerm Qualified 0) ts)) $$
|
||||||
|
"given" <+> fsep (punctuate ',' (map fst qualifs)))
|
||||||
|
return t
|
||||||
|
|
||||||
|
renameInfo
|
||||||
|
renLoc ren (L loc x) =
|
||||||
|
checkInModule cwd mi loc ("Happened in the renaming of" <+> i) $ do
|
||||||
|
|
||||||
|
renameTerm
|
||||||
|
| otherwise -> checks [ renid' (Q (MN r,label2ident l)) -- .. and qualified expression second.
|
||||||
|
, renid' t >>= \t -> return (P t l) -- try as a constant at the end
|
||||||
|
, checkError ("unknown qualified constant" <+> trm)
|
||||||
|
]
|
||||||
|
|
||||||
|
renamePattern env patt =
|
||||||
|
do r@(p',vs) <- renp patt
|
||||||
|
let dupl = vs \\ nub vs
|
||||||
|
unless (null dupl) $ checkError (hang ("[C.4.13] Pattern is not linear:") 4
|
||||||
|
patt)
|
||||||
|
return r
|
||||||
|
|
||||||
|
case c' of
|
||||||
|
Q d -> renp $ PM d
|
||||||
|
_ -> checkError ("unresolved pattern" <+> patt)
|
||||||
|
|
||||||
|
Q _ -> checkError ("data constructor expected but" <+> ppTerm Qualified 0 c' <+> "is found instead")
|
||||||
|
_ -> checkError ("unresolved data constructor" <+> ppTerm Qualified 0 c')
|
||||||
|
|
||||||
|
PM c -> do
|
||||||
|
x <- renid (Q c)
|
||||||
|
c' <- case x of
|
||||||
|
(Q c') -> return c'
|
||||||
|
_ -> checkError ("not a pattern macro" <+> ppPatt Qualified 0 patt)
|
||||||
|
|
||||||
|
PV x -> checks [ renid' (Vr x) >>= \t' -> case t' of
|
||||||
|
QC c -> return (PP c [],[])
|
||||||
|
_ -> checkError (pp "not a constructor")
|
||||||
|
, return (patt, [x])
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-----------------------------------
|
||||||
|
CheckGrammar.hs
|
||||||
|
|
||||||
|
checkRestrictedInheritance :: FilePath -> SourceGrammar -> SourceModule -> Check ()
|
||||||
|
let illegals = [(f,is) |
|
||||||
|
(f,cs) <- allDeps, incld f, let is = filter illegal cs, not (null is)]
|
||||||
|
case illegals of
|
||||||
|
[] -> return ()
|
||||||
|
cs -> checkWarn ("In inherited module" <+> i <> ", dependence of excluded constants:" $$
|
||||||
|
nest 2 (vcat [f <+> "on" <+> fsep is | (f,is) <- cs]))
|
||||||
|
|
||||||
|
checkCompleteGrammar :: Options -> FilePath -> Grammar -> Module -> Module -> Check Module
|
||||||
|
case info of
|
||||||
|
CncCat (Just (L loc (RecType []))) _ _ _ _ -> return (foldr (\_ -> Abs Explicit identW) (R []) cxt)
|
||||||
|
_ -> Bad "no def lin"
|
||||||
|
|
||||||
|
where noLinOf c = checkWarn ("no linearization of" <+> c)
|
||||||
|
|
||||||
|
Ok (CncCat Nothing md mr mp mpmcfg) -> do
|
||||||
|
checkWarn ("no linearization type for" <+> c <> ", inserting default {s : Str}")
|
||||||
|
return $ updateTree (c,CncCat (Just (L NoLoc defLinType)) md mr mp mpmcfg) js
|
||||||
|
_ -> do
|
||||||
|
checkWarn ("no linearization type for" <+> c <> ", inserting default {s : Str}")
|
||||||
|
|
||||||
|
_ -> do checkWarn ("function" <+> c <+> "is not in abstract")
|
||||||
|
|
||||||
|
Ok (_,AbsFun {}) ->
|
||||||
|
checkError ("lincat:"<+>c<+>"is a fun, not a cat")
|
||||||
|
-}
|
||||||
|
_ -> do checkWarn ("category" <+> c <+> "is not in abstract")
|
||||||
|
|
||||||
|
checkInfo :: Options -> FilePath -> SourceGrammar -> SourceModule -> Ident -> Info -> Check Info
|
||||||
|
(Just (L loct ty), Nothing) -> do
|
||||||
|
chIn loct "operation" $
|
||||||
|
checkError (pp "No definition given to the operation")
|
||||||
|
|
||||||
|
ResOverload os tysts -> chIn NoLoc "overloading" $ do
|
||||||
|
|
||||||
|
checkUniq xss = case xss of
|
||||||
|
x:y:xs
|
||||||
|
| x == y -> checkError $ "ambiguous for type" <+>
|
||||||
|
ppType (mkFunType (tail x) (head x))
|
||||||
|
|
||||||
|
compAbsTyp g t = case t of
|
||||||
|
Vr x -> maybe (checkError ("no value given to variable" <+> x)) return $ lookup x g
|
||||||
|
|
||||||
|
checkReservedId x =
|
||||||
|
when (isReservedWord x) $
|
||||||
|
checkWarn ("reserved word used as identifier:" <+> x)
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------
|
||||||
|
TypeCheck/Abstract.hs
|
||||||
|
|
||||||
|
grammar2theory :: SourceGrammar -> Theory
|
||||||
|
Bad s -> case lookupCatContext gr m f of
|
||||||
|
Ok cont -> return $ cont2val cont
|
||||||
|
_ -> Bad s
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------
|
||||||
|
TypeCheck/ConcreteNew.hs
|
||||||
|
-- Concrete.hs has all its code commented out
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------
|
||||||
|
TypeCheck/RConcrete.hs
|
||||||
|
-- seems to be used more than ConcreteNew
|
||||||
|
|
||||||
|
computeLType :: SourceGrammar -> Context -> Type -> Check Type
|
||||||
|
AdHocOverload ts -> do
|
||||||
|
over <- getOverload gr g (Just typeType) t
|
||||||
|
case over of
|
||||||
|
Just (tr,_) -> return tr
|
||||||
|
_ -> checkError ("unresolved overloading of constants" <+> ppTerm Qualified 0 t)
|
||||||
|
|
||||||
|
inferLType :: SourceGrammar -> Context -> Term -> Check (Term, Type)
|
||||||
|
Q (m,ident) | isPredef m -> termWith trm $ case typPredefined ident of
|
||||||
|
Nothing -> checkError ("unknown in Predef:" <+> ident)
|
||||||
|
|
||||||
|
Q ident -> checks [
|
||||||
|
checkError ("cannot infer type of constant" <+> ppTerm Unqualified 0 trm)
|
||||||
|
]
|
||||||
|
|
||||||
|
QC ident -> checks [
|
||||||
|
checkError ("cannot infer type of canonical constant" <+> ppTerm Unqualified 0 trm)
|
||||||
|
]
|
||||||
|
|
||||||
|
Vr ident -> termWith trm $ checkLookup ident g
|
||||||
|
|
||||||
|
AdHocOverload ts -> do
|
||||||
|
_ -> checkError ("unresolved overloading of constants" <+> ppTerm Qualified 0 trm)
|
||||||
|
|
||||||
|
App f a -> do
|
||||||
|
case fty' of
|
||||||
|
Prod bt z arg val -> do
|
||||||
|
_ -> checkError ("A function type is expected for" <+> ppTerm Unqualified 0 f <+> "instead of type" <+> ppType fty)
|
||||||
|
|
||||||
|
S f x -> do
|
||||||
|
_ -> checkError ("table lintype expected for the table in" $$ nest 2 (ppTerm Unqualified 0 trm))
|
||||||
|
|
||||||
|
P t i -> do
|
||||||
|
Nothing -> checkError ("unknown label" <+> i <+> "in" $$ nest 2 (ppTerm Unqualified 0 ty'))
|
||||||
|
_ -> checkError ("record type expected for:" <+> ppTerm Unqualified 0 t $$
|
||||||
|
" instead of the inferred:" <+> ppTerm Unqualified 0 ty')
|
||||||
|
|
||||||
|
R r -> do
|
||||||
|
checkCond ("cannot infer type of record" $$ nest 2 (ppTerm Unqualified 0 trm)) (length ts == length fsts)
|
||||||
|
|
||||||
|
T ti pts -> do -- tries to guess: good in oper type inference
|
||||||
|
[] -> checkError ("cannot infer table type of" <+> ppTerm Unqualified 0 trm)
|
||||||
|
|
||||||
|
---- hack from Rename.identRenameTerm, to live with files with naming conflicts 18/6/2007
|
||||||
|
Strs (Cn c : ts) | c == cConflict -> do
|
||||||
|
checkWarn ("unresolved constant, could be any of" <+> hcat (map (ppTerm Unqualified 0) ts))
|
||||||
|
|
||||||
|
ExtR r s -> do
|
||||||
|
case (rT', sT') of
|
||||||
|
(RecType rs, RecType ss) -> do
|
||||||
|
_ -> checkError ("records or record types expected in" <+> ppTerm Unqualified 0 trm)
|
||||||
|
|
||||||
|
_ -> checkError ("cannot infer lintype of" <+> ppTerm Unqualified 0 trm)
|
||||||
|
|
||||||
|
|
||||||
|
getOverload :: SourceGrammar -> Context -> Maybe Type -> Term -> Check (Maybe (Term,Type))
|
||||||
|
matchOverload f typs ttys = do
|
||||||
|
checkWarn $ "ignoring lock fields in resolving" <+> ppTerm Unqualified 0 ot $$
|
||||||
|
"for" $$
|
||||||
|
nest 2 (showTypes tys) $$
|
||||||
|
"using" $$
|
||||||
|
nest 2 (showTypes pre)
|
||||||
|
([],[]) -> do
|
||||||
|
checkError $ "no overload instance of" <+> ppTerm Unqualified 0 f $$
|
||||||
|
"for" $$
|
||||||
|
nest 2 stysError $$
|
||||||
|
"among" $$
|
||||||
|
nest 2 (vcat stypsError) $$
|
||||||
|
maybe empty (\x -> "with value type" <+> ppType x) mt
|
||||||
|
([],[(val,fun)]) -> do
|
||||||
|
checkWarn ("ignoring lock fields in resolving" <+> ppTerm Unqualified 0 ot)
|
||||||
|
(nps1,nps2) -> do
|
||||||
|
checkWarn $ "ambiguous overloading of" <+> ppTerm Unqualified 0 f <+>
|
||||||
|
---- "with argument types" <+> hsep (map (ppTerm Qualified 0) tys) $$
|
||||||
|
"resolved by selecting the first of the alternatives" $$
|
||||||
|
nest 2 (vcat [ppTerm Qualified 0 fun | (_,ty,fun) <- vfs1 ++ if null vfs1 then vfs2 else []])
|
||||||
|
case [(mkApp fun tts,val) | (val,fun) <- nps1 ++ nps2] of
|
||||||
|
[] -> checkError $ "no alternatives left when resolving" <+> ppTerm Unqualified 0 f
|
||||||
|
|
||||||
|
checkLType :: SourceGrammar -> Context -> Term -> Type -> Check (Term, Type)
|
||||||
|
Abs bt x c -> do
|
||||||
|
case typ of
|
||||||
|
Prod bt' z a b -> do
|
||||||
|
_ -> checkError $ "function type expected instead of" <+> ppType typ
|
||||||
|
AdHocOverload ts -> do
|
||||||
|
_ -> checkError ("unresolved overloading of constants" <+> ppTerm Qualified 0 trm)
|
||||||
|
T _ [] ->
|
||||||
|
checkError ("found empty table in type" <+> ppTerm Unqualified 0 typ)
|
||||||
|
T _ cs -> case typ of
|
||||||
|
else checkWarn ("patterns never reached:" $$
|
||||||
|
nest 2 (vcat (map (ppPatt Unqualified 0) ps)))
|
||||||
|
_ -> checkError $ "table type expected for table instead of" $$ nest 2 (ppType typ)
|
||||||
|
V arg0 vs ->
|
||||||
|
if length vs1 == length vs
|
||||||
|
then return ()
|
||||||
|
else checkError $ "wrong number of values in table" <+> ppTerm Unqualified 0 trm
|
||||||
|
|
||||||
|
R r -> case typ of --- why needed? because inference may be too difficult
|
||||||
|
RecType rr -> do
|
||||||
|
_ -> checkError ("record type expected in type checking instead of" $$ nest 2 (ppTerm Unqualified 0 typ))
|
||||||
|
|
||||||
|
ExtR r s -> case typ of
|
||||||
|
case trm' of
|
||||||
|
RecType _ -> termWith trm' $ return typeType
|
||||||
|
ExtR (Vr _) (RecType _) -> termWith trm' $ return typeType
|
||||||
|
-- ext t = t ** ...
|
||||||
|
_ -> checkError ("invalid record type extension" <+> nest 2 (ppTerm Unqualified 0 trm))
|
||||||
|
|
||||||
|
case typ2 of
|
||||||
|
RecType ss -> return $ map fst ss
|
||||||
|
_ -> checkError ("cannot get labels from" $$ nest 2 (ppTerm Unqualified 0 typ2))
|
||||||
|
_ -> checkError ("record extension not meaningful for" <+> ppTerm Unqualified 0 typ)
|
||||||
|
|
||||||
|
S tab arg -> checks [ do
|
||||||
|
_ -> checkError ("table type expected for applied table instead of" <+> ppType ty')
|
||||||
|
|
||||||
|
_ -> do
|
||||||
|
(trm',ty') <- inferLType gr g trm
|
||||||
|
termWith trm' $ checkEqLType gr g typ ty' trm'
|
||||||
|
|
||||||
|
checkM rms (l,ty) = case lookup l rms of
|
||||||
|
_ -> checkError $
|
||||||
|
if isLockLabel l
|
||||||
|
then let cat = drop 5 (showIdent (label2ident l))
|
||||||
|
in ppTerm Unqualified 0 (R rms) <+> "is not in the lincat of" <+> cat <>
|
||||||
|
"; try wrapping it with lin" <+> cat
|
||||||
|
else "cannot find value for label" <+> l <+> "in" <+> ppTerm Unqualified 0 (R rms)
|
||||||
|
|
||||||
|
checkEqLType :: SourceGrammar -> Context -> Type -> Type -> Term -> Check Type
|
||||||
|
False -> checkError $ s <+> "type of" <+> ppTerm Unqualified 0 trm $$
|
||||||
|
"expected:" <+> ppTerm Qualified 0 t $$ -- ppqType t u $$
|
||||||
|
"inferred:" <+> ppTerm Qualified 0 u -- ppqType u t
|
||||||
|
|
||||||
|
checkIfEqLType :: SourceGrammar -> Context -> Type -> Type -> Term -> Check (Bool,Type,Type,String)
|
||||||
|
Ok lo -> do
|
||||||
|
checkWarn $ "missing lock field" <+> fsep lo
|
||||||
|
|
||||||
|
missingLock g t u = case (t,u) of
|
||||||
|
_:_ -> Bad $ render ("missing record fields:" <+> fsep (punctuate ',' (others)))
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
pattContext :: SourceGrammar -> Context -> Type -> Patt -> Check Context
|
||||||
|
checkCond ("wrong number of arguments for constructor in" <+> ppPatt Unqualified 0 p)
|
||||||
|
(length cont == length ps)
|
||||||
|
PR r -> do
|
||||||
|
_ -> checkError ("record type expected for pattern instead of" <+> ppTerm Unqualified 0 typ')
|
||||||
|
|
||||||
|
PAlt p' q -> do
|
||||||
|
g1 <- pattContext env g typ p'
|
||||||
|
g2 <- pattContext env g typ q
|
||||||
|
let pts = nub ([x | pt@(_,x,_) <- g1, notElem pt g2] ++ [x | pt@(_,x,_) <- g2, notElem pt g1])
|
||||||
|
checkCond
|
||||||
|
("incompatible bindings of" <+>
|
||||||
|
fsep pts <+>
|
||||||
|
"in pattern alterantives" <+> ppPatt Unqualified 0 p) (null pts)
|
||||||
|
return g1 -- must be g1 == g2
|
||||||
|
|
||||||
|
noBind typ p' = do
|
||||||
|
co <- pattContext env g typ p'
|
||||||
|
if not (null co)
|
||||||
|
then checkWarn ("no variable bound inside pattern" <+> ppPatt Unqualified 0 p)
|
||||||
|
>> return []
|
||||||
|
else return []
|
||||||
|
|
||||||
|
checkLookup :: Ident -> Context -> Check Type -- used for looking up Vr x type in context
|
||||||
|
[] -> checkError ("unknown variable" <+> x)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-------------------------------
|
||||||
|
Grammar/Lookup.hs
|
||||||
|
|
||||||
|
lookupIdent :: ErrorMonad m => Ident -> BinTree Ident b -> m b
|
||||||
|
Bad _ -> raise ("unknown identifier" +++ showIdent c)
|
||||||
|
|
||||||
|
lookupResDefLoc
|
||||||
|
_ -> raise $ render (c <+> "is not defined in resource" <+> m)
|
||||||
|
|
||||||
|
lookupResType :: ErrorMonad m => Grammar -> QIdent -> m Type
|
||||||
|
_ -> raise $ render (c <+> "has no type defined in resource" <+> m)
|
||||||
|
|
||||||
|
lookupOverloadTypes :: ErrorMonad m => Grammar -> QIdent -> m [(Term,Type)]
|
||||||
|
_ -> raise $ render (c <+> "has no types defined in resource" <+> m)
|
||||||
|
|
||||||
|
lookupOverload :: ErrorMonad m => Grammar -> QIdent -> m [([Type],(Type,Term))]
|
||||||
|
_ -> raise $ render (c <+> "is not an overloaded operation")
|
||||||
|
|
||||||
|
|
||||||
|
lookupParamValues :: ErrorMonad m => Grammar -> QIdent -> m [Term]
|
||||||
|
case info of
|
||||||
|
ResParam _ (Just pvs) -> return pvs
|
||||||
|
_ -> raise $ render (ppQIdent Qualified c <+> "has no parameter values defined")
|
||||||
|
|
||||||
|
|
||||||
|
allParamValues :: ErrorMonad m => Grammar -> Type -> m [Term]
|
||||||
|
_ -> raise (render ("cannot find parameter values for" <+> ptyp))
|
||||||
|
|
||||||
|
|
||||||
|
lookupFunType :: ErrorMonad m => Grammar -> ModuleName -> Ident -> m Type
|
||||||
|
_ -> raise (render ("cannot find type of" <+> c))
|
||||||
|
|
||||||
|
lookupCatContext :: ErrorMonad m => Grammar -> ModuleName -> Ident -> m Context
|
||||||
|
_ -> raise (render ("unknown category" <+> c))
|
||||||
|
|
||||||
|
|
||||||
|
-------------------------
|
||||||
|
PatternMatch.hs
|
||||||
|
|
||||||
|
matchPattern :: ErrorMonad m => [(Patt,rhs)] -> Term -> m (rhs, Substitution)
|
||||||
|
if not (isInConstantForm term)
|
||||||
|
then raise (render ("variables occur in" <+> pp term))
|
||||||
|
|
||||||
|
findMatch :: ErrorMonad m => [([Patt],rhs)] -> [Term] -> m (rhs, Substitution)
|
||||||
|
[] -> raise (render ("no applicable case for" <+> hsep (punctuate ',' terms)))
|
||||||
|
(patts,_):_ | length patts /= length terms ->
|
||||||
|
raise (render ("wrong number of args for patterns :" <+> hsep patts <+>
|
||||||
|
"cannot take" <+> hsep terms))
|
||||||
|
|
||||||
|
tryMatch :: (Patt, Term) -> Err [(Ident, Term)]
|
||||||
|
(PNeg p',_) -> case tryMatch (p',t) of
|
||||||
|
Bad _ -> return []
|
||||||
|
_ -> raise (render ("no match with negative pattern" <+> p))
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------
|
||||||
|
Compile.Optimize.hs
|
||||||
|
|
||||||
|
mkLinDefault :: SourceGrammar -> Type -> Err Term
|
||||||
|
_ -> Bad (render ("no parameter values given to type" <+> ppQIdent Qualified p))
|
||||||
|
_ -> Bad (render ("linearization type field cannot be" <+> typ))
|
||||||
|
|
||||||
|
mkLinReference :: SourceGrammar -> Type -> Err Term
|
||||||
|
[] -> Bad "no string"
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------
|
||||||
|
Compile.Compute.Concrete.hs
|
||||||
|
|
||||||
|
nfx env@(GE _ _ _ loc) t = do
|
||||||
|
Left i -> fail ("variable #"++show i++" is out of scope")
|
||||||
|
|
||||||
|
var :: CompleteEnv -> Ident -> Err OpenValue
|
||||||
|
var env x = maybe unbound pick' (elemIndex x (local env))
|
||||||
|
where
|
||||||
|
unbound = fail ("Unknown variable: "++showIdent x)
|
||||||
|
pick' i = return $ \ vs -> maybe (err i vs) ok (pick i vs)
|
||||||
|
err i vs = bug $ "Stack problem: "++showIdent x++": "
|
||||||
|
++unwords (map showIdent (local env))
|
||||||
|
++" => "++show (i,length vs)
|
||||||
|
|
||||||
|
resource env (m,c) =
|
||||||
|
where e = fail $ "Not found: "++render m++"."++showIdent c
|
||||||
|
|
||||||
|
extR t vv =
|
||||||
|
(VRecType rs1, VRecType rs2) ->
|
||||||
|
case intersect (map fst rs1) (map fst rs2) of
|
||||||
|
[] -> VRecType (rs1 ++ rs2)
|
||||||
|
ls -> error $ "clash"<+>show ls
|
||||||
|
(v1,v2) -> error $ "not records" $$ show v1 $$ show v2
|
||||||
|
where
|
||||||
|
error explain = ppbug $ "The term" <+> t
|
||||||
|
<+> "is not reducible" $$ explain
|
||||||
|
|
||||||
|
glue env (v1,v2) = glu v1 v2
|
||||||
|
ppL loc (hang "unsupported token gluing:" 4
|
||||||
|
(Glue (vt v1) (vt v2)))
|
||||||
|
|
||||||
|
strsFromValue :: Value -> Err [Str]
|
||||||
|
_ -> fail ("cannot get Str from value " ++ show t)
|
||||||
|
|
||||||
|
match loc cs v =
|
||||||
|
case value2term loc [] v of
|
||||||
|
Left i -> bad ("variable #"++show i++" is out of scope")
|
||||||
|
Right t -> err bad return (matchPattern cs t)
|
||||||
|
where
|
||||||
|
bad = fail . ("In pattern matching: "++)
|
||||||
|
|
||||||
|
inlinePattMacro p =
|
||||||
|
VPatt p' -> inlinePattMacro p'
|
||||||
|
_ -> ppbug $ hang "Expected pattern macro:" 4
|
||||||
|
|
||||||
|
linPattVars p =
|
||||||
|
if null dups
|
||||||
|
then return pvs
|
||||||
|
else fail.render $ hang "Pattern is not linear:" 4 (ppPatt Unqualified 0 p)
|
||||||
|
|
||||||
|
---------------------------------------------
|
||||||
|
Compile.Compute.Abstract.hs
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------
|
||||||
|
PGF.Linearize.hs
|
||||||
|
|
||||||
|
bracketedLinearize :: PGF -> Language -> Tree -> [BracketedString]
|
||||||
|
cnc = lookMap (error "no lang") lang (concretes pgf)
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------
|
||||||
|
PGF.TypeCheck.hs
|
||||||
|
|
||||||
|
ppTcError :: TcError -> Doc
|
||||||
|
ppTcError (UnknownCat cat) = text "Category" <+> ppCId cat <+> text "is not in scope"
|
||||||
|
ppTcError (UnknownFun fun) = text "Function" <+> ppCId fun <+> text "is not in scope"
|
||||||
|
ppTcError (WrongCatArgs xs ty cat m n) = text "Category" <+> ppCId cat <+> text "should have" <+> int m <+> text "argument(s), but has been given" <+> int n $$
|
||||||
|
text "In the type:" <+> ppType 0 xs ty
|
||||||
|
ppTcError (TypeMismatch xs e ty1 ty2) = text "Couldn't match expected type" <+> ppType 0 xs ty1 $$
|
||||||
|
text " against inferred type" <+> ppType 0 xs ty2 $$
|
||||||
|
text "In the expression:" <+> ppExpr 0 xs e
|
||||||
|
ppTcError (NotFunType xs e ty) = text "A function type is expected for the expression" <+> ppExpr 0 xs e <+> text "instead of type" <+> ppType 0 xs ty
|
||||||
|
ppTcError (CannotInferType xs e) = text "Cannot infer the type of expression" <+> ppExpr 0 xs e
|
||||||
|
ppTcError (UnresolvedMetaVars xs e ms) = text "Meta variable(s)" <+> fsep (List.map ppMeta ms) <+> text "should be resolved" $$
|
||||||
|
text "in the expression:" <+> ppExpr 0 xs e
|
||||||
|
ppTcError (UnexpectedImplArg xs e) = braces (ppExpr 0 xs e) <+> text "is implicit argument but not implicit argument is expected here"
|
||||||
|
ppTcError (UnsolvableGoal xs metaid ty)= text "The goal:" <+> ppMeta metaid <+> colon <+> ppType 0 xs ty $$
|
||||||
|
text "cannot be solved"
|
||||||
|
|
||||||
@@ -1,16 +1,8 @@
|
|||||||
GF Developers Guide
|
GF Developers Guide
|
||||||
Authors: Björn Bringert, Krasimir Angelov and Thomas Hallgren
|
|
||||||
Last update: %%mtime(%F, %H:%M)
|
|
||||||
|
|
||||||
% NOTE: this is a txt2tags file.
|
2018-07-26
|
||||||
% Create an html file from this file using:
|
|
||||||
% txt2tags -t html gf-developers.t2t
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!target:html
|
|
||||||
%!options(html): --toc
|
%!options(html): --toc
|
||||||
%!encoding:utf-8
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
== Before you start ==
|
== Before you start ==
|
||||||
|
|
||||||
@@ -63,18 +55,6 @@ Other required tools included in the Haskell Platform are
|
|||||||
and
|
and
|
||||||
[Happy http://www.haskell.org/happy/].
|
[Happy http://www.haskell.org/happy/].
|
||||||
|
|
||||||
%=== Darcs ===
|
|
||||||
%
|
|
||||||
%To get the GF source code, you also need //Darcs//, version 2 or later.
|
|
||||||
%Darcs 2.10 is recommended (July 2015).
|
|
||||||
%
|
|
||||||
%//Darcs//
|
|
||||||
%is a distributed version control system, see http://darcs.net/ for
|
|
||||||
%more information. There are precompiled packages for many platforms
|
|
||||||
%available and source code if you want to compile it yourself. Darcs
|
|
||||||
%is also written in Haskell and so you can use GHC to compile it.
|
|
||||||
|
|
||||||
|
|
||||||
=== Git ===
|
=== Git ===
|
||||||
|
|
||||||
To get the GF source code, you also need //Git//.
|
To get the GF source code, you also need //Git//.
|
||||||
@@ -425,13 +405,13 @@ There is also ``make build``, ``make copy`` and ``make clean`` which do what you
|
|||||||
=== Advanced ===
|
=== Advanced ===
|
||||||
For advanced build options, call the Haskell build script directly:
|
For advanced build options, call the Haskell build script directly:
|
||||||
```
|
```
|
||||||
$ runghc Make.hs ...
|
$ runghc Setup.hs ...
|
||||||
```
|
```
|
||||||
For more details see the [README https://github.com/GrammaticalFramework/gf-rgl/blob/master/README.md].
|
For more details see the [README https://github.com/GrammaticalFramework/gf-rgl/blob/master/README.md].
|
||||||
|
|
||||||
=== Haskell-free ===
|
=== Haskell-free ===
|
||||||
If you do not have Haskell installed, you can use the simple build script ``Make.sh``
|
If you do not have Haskell installed, you can use the simple build script ``Setup.sh``
|
||||||
(or ``Make.bat`` for Windows).
|
(or ``Setup.bat`` for Windows).
|
||||||
|
|
||||||
|
|
||||||
== Creating binary distribution packages ==
|
== Creating binary distribution packages ==
|
||||||
|
|||||||
@@ -1,12 +1,6 @@
|
|||||||
Editor modes & IDE integration for GF
|
Editor modes & IDE integration for GF
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!options(html): --toc
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!encoding:utf-8
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
We collect GF modes for various editors on this page. Contributions are
|
We collect GF modes for various editors on this page. Contributions are
|
||||||
welcome!
|
welcome!
|
||||||
|
|
||||||
|
|||||||
@@ -1,91 +0,0 @@
|
|||||||
Grammatical Framework: Frequently Asked Quuestions
|
|
||||||
Aarne Ranta
|
|
||||||
%%date(%c)
|
|
||||||
|
|
||||||
% NOTE: this is a txt2tags file.
|
|
||||||
% Create an html file from this file using:
|
|
||||||
% txt2tags gf-bibliography.t2t
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!target:html
|
|
||||||
%!options(html): --toc
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): #BR <br>
|
|
||||||
%!encoding:utf-8
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
|
|
||||||
===What has been done with GF?===
|
|
||||||
|
|
||||||
**Translation**: systems with any number of parallel languages, with input in one language and output in all the others.
|
|
||||||
|
|
||||||
**Natural language generation** (NLG): translation from a formal language to natural languages.
|
|
||||||
|
|
||||||
**Ontology verbalization** is a special case of NLG.
|
|
||||||
|
|
||||||
**Language training**: grammar and vocabulary training systems.
|
|
||||||
|
|
||||||
**Human-computer interaction**: natural language interfaces, spoken dialogue systems.
|
|
||||||
|
|
||||||
**Linguistics**: comparisons between languages.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===What parts does GF have?===
|
|
||||||
|
|
||||||
A **grammar compiler**, used for compiling grammars to parsing, generation, and translation code.
|
|
||||||
|
|
||||||
A **run-time system**, used for parsing, generation and translation. The run-time system is available in several languages:
|
|
||||||
Haskell, Java, C, C++, Javascript, and Python. The point with this is that you can include GF-based parsing and generation in
|
|
||||||
larger programs written in any of these languages.
|
|
||||||
|
|
||||||
A **resource grammar library**, containing the morphology and basic syntax of currently 26 languages.
|
|
||||||
|
|
||||||
A **web application toolkit**, containing server-side (Haskell) and client-side (Javascript) libraries.
|
|
||||||
|
|
||||||
An **integrated development environment**, the GF-Eclipse plug-in.
|
|
||||||
|
|
||||||
A **shell**, i.e. a command interpreter for testing and developing GF grammars. This is the program started by the command ``gf`` in a terminal.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===Is GF open-source?===
|
|
||||||
|
|
||||||
|
|
||||||
===Can I use GF for commercial applications?===
|
|
||||||
|
|
||||||
Yes. Those parts of GF that you will need to distribute - the run-time system and the libraries - are licensed under LGPL and BSD; it's up to you to choose which.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===When was GF started?===
|
|
||||||
|
|
||||||
|
|
||||||
===Where does the name GF come from?===
|
|
||||||
|
|
||||||
GF = Grammatical Framework = LF + concrete syntax
|
|
||||||
|
|
||||||
LF = Logical Framework
|
|
||||||
|
|
||||||
Logical Frameworks are implementations of type theory, which have been built since the 1980's to support formalized mathematics. GF has its roots in
|
|
||||||
type theory, which is widely used in the semantics of natural language. Some of these ideas were first implemented in ALF, Another Logical Framework,
|
|
||||||
in 1992; the book //Type-Theoretical Grammar// (by A. Ranta, OUP 1994) has a chapter and an appendix on this. The first implementations did not have
|
|
||||||
a parser, and GF proper, started in 1998, was an implementation of yet another LF together with concrete syntax supporting generation and parsing.
|
|
||||||
Grammatical Framework was a natural name for this. We tried to avoid it in the beginning, because it sounded pretentious in its generality. But the
|
|
||||||
name was just too natural to be avoided.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===Is GF backward compatible?===
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===Do I need Haskell to use GF?===
|
|
||||||
|
|
||||||
No. GF is a language of its own, and you don't need to know Haskell. And if you download the GF binary, you don't need any Haskell tools. But if you want to
|
|
||||||
become a GF developer, then it's better you install GF from the latest source, and then you need the GHC Haskell compiler to compile GF. But even then, you
|
|
||||||
don't need to know Haskell yourself.
|
|
||||||
|
|
||||||
|
|
||||||
===What is a lock field?===
|
|
||||||
|
|
||||||
@@ -68,9 +68,9 @@ metavariables and the type of the expression.
|
|||||||
Prints a set of strings in the .dot format (the graphviz format).
|
Prints a set of strings in the .dot format (the graphviz format).
|
||||||
The graph can be saved in a file by the wf command as usual.
|
The graph can be saved in a file by the wf command as usual.
|
||||||
If the -view flag is defined, the graph is saved in a temporary file
|
If the -view flag is defined, the graph is saved in a temporary file
|
||||||
which is processed by graphviz and displayed by the program indicated
|
which is processed by 'dot' (graphviz) and displayed by the program indicated
|
||||||
by the flag. The target format is postscript, unless overridden by the
|
by the view flag. The target format is png, unless overridden by the
|
||||||
flag -format.
|
flag -format. Results from multiple trees are combined to pdf with convert (ImageMagick).
|
||||||
|
|
||||||
|
|
||||||
- Options:
|
- Options:
|
||||||
@@ -151,6 +151,7 @@ of a pipe.
|
|||||||
| ``-one`` | pick the first strings, if there is any, from records and tables
|
| ``-one`` | pick the first strings, if there is any, from records and tables
|
||||||
| ``-table`` | show all strings labelled by parameters
|
| ``-table`` | show all strings labelled by parameters
|
||||||
| ``-unqual`` | hide qualifying module names
|
| ``-unqual`` | hide qualifying module names
|
||||||
|
| ``-trace`` | trace computations
|
||||||
|
|
||||||
|
|
||||||
#NORMAL
|
#NORMAL
|
||||||
@@ -242,7 +243,7 @@ and thus cannot be a part of a pipe.
|
|||||||
|
|
||||||
====e = empty====
|
====e = empty====
|
||||||
#NOINDENT
|
#NOINDENT
|
||||||
``e`` = ``empty``: //empty the environment.//
|
``e`` = ``empty``: //empty the environment (except the command history).//
|
||||||
|
|
||||||
#TINY
|
#TINY
|
||||||
|
|
||||||
@@ -281,6 +282,19 @@ but the resulting .gf file must be imported separately.
|
|||||||
#NORMAL
|
#NORMAL
|
||||||
|
|
||||||
|
|
||||||
|
#VSPACE
|
||||||
|
|
||||||
|
====eh = execute_history====
|
||||||
|
#NOINDENT
|
||||||
|
``eh`` = ``execute_history``: //read commands from a file and execute them.//
|
||||||
|
|
||||||
|
#TINY
|
||||||
|
|
||||||
|
- Syntax: ``eh FILE``
|
||||||
|
|
||||||
|
#NORMAL
|
||||||
|
|
||||||
|
|
||||||
#VSPACE
|
#VSPACE
|
||||||
|
|
||||||
====gr = generate_random====
|
====gr = generate_random====
|
||||||
@@ -434,12 +448,14 @@ sequences; see example.
|
|||||||
| ``-list`` | show all forms and variants, comma-separated on one line (cf. l -all)
|
| ``-list`` | show all forms and variants, comma-separated on one line (cf. l -all)
|
||||||
| ``-multi`` | linearize to all languages (default)
|
| ``-multi`` | linearize to all languages (default)
|
||||||
| ``-table`` | show all forms labelled by parameters
|
| ``-table`` | show all forms labelled by parameters
|
||||||
|
| ``-tabtreebank`` | show the tree and its linearizations on a tab-separated line
|
||||||
| ``-treebank`` | show the tree and tag linearizations with language names
|
| ``-treebank`` | show the tree and tag linearizations with language names
|
||||||
| ``-bind`` | bind tokens separated by Prelude.BIND, i.e. &+
|
| ``-bind`` | bind tokens separated by Prelude.BIND, i.e. &+
|
||||||
| ``-chars`` | lexer that makes every non-space character a token
|
| ``-chars`` | lexer that makes every non-space character a token
|
||||||
| ``-from_amharic`` | from unicode to GF Amharic transliteration
|
| ``-from_amharic`` | from unicode to GF Amharic transliteration
|
||||||
| ``-from_ancientgreek`` | from unicode to GF ancient Greek transliteration
|
| ``-from_ancientgreek`` | from unicode to GF ancient Greek transliteration
|
||||||
| ``-from_arabic`` | from unicode to GF Arabic transliteration
|
| ``-from_arabic`` | from unicode to GF Arabic transliteration
|
||||||
|
| ``-from_arabic_unvocalized`` | from unicode to GF unvocalized Arabic transliteration
|
||||||
| ``-from_cp1251`` | decode from cp1251 (Cyrillic used in Bulgarian resource)
|
| ``-from_cp1251`` | decode from cp1251 (Cyrillic used in Bulgarian resource)
|
||||||
| ``-from_devanagari`` | from unicode to GF Devanagari transliteration
|
| ``-from_devanagari`` | from unicode to GF Devanagari transliteration
|
||||||
| ``-from_greek`` | from unicode to GF modern Greek transliteration
|
| ``-from_greek`` | from unicode to GF modern Greek transliteration
|
||||||
@@ -453,11 +469,14 @@ sequences; see example.
|
|||||||
| ``-from_urdu`` | from unicode to GF Urdu transliteration
|
| ``-from_urdu`` | from unicode to GF Urdu transliteration
|
||||||
| ``-from_utf8`` | decode from utf8 (default)
|
| ``-from_utf8`` | decode from utf8 (default)
|
||||||
| ``-lexcode`` | code-like lexer
|
| ``-lexcode`` | code-like lexer
|
||||||
|
| ``-lexgreek`` | lexer normalizing ancient Greek accentuation
|
||||||
|
| ``-lexgreek2`` | lexer normalizing ancient Greek accentuation for text with vowel length annotations
|
||||||
| ``-lexmixed`` | mixture of text and code, as in LaTeX (code between $...$, \(...)\, \[...\])
|
| ``-lexmixed`` | mixture of text and code, as in LaTeX (code between $...$, \(...)\, \[...\])
|
||||||
| ``-lextext`` | text-like lexer
|
| ``-lextext`` | text-like lexer
|
||||||
| ``-to_amharic`` | from GF Amharic transliteration to unicode
|
| ``-to_amharic`` | from GF Amharic transliteration to unicode
|
||||||
| ``-to_ancientgreek`` | from GF ancient Greek transliteration to unicode
|
| ``-to_ancientgreek`` | from GF ancient Greek transliteration to unicode
|
||||||
| ``-to_arabic`` | from GF Arabic transliteration to unicode
|
| ``-to_arabic`` | from GF Arabic transliteration to unicode
|
||||||
|
| ``-to_arabic_unvocalized`` | from GF unvocalized Arabic transliteration to unicode
|
||||||
| ``-to_cp1251`` | encode to cp1251 (Cyrillic used in Bulgarian resource)
|
| ``-to_cp1251`` | encode to cp1251 (Cyrillic used in Bulgarian resource)
|
||||||
| ``-to_devanagari`` | from GF Devanagari transliteration to unicode
|
| ``-to_devanagari`` | from GF Devanagari transliteration to unicode
|
||||||
| ``-to_greek`` | from GF modern Greek transliteration to unicode
|
| ``-to_greek`` | from GF modern Greek transliteration to unicode
|
||||||
@@ -473,6 +492,7 @@ sequences; see example.
|
|||||||
| ``-to_utf8`` | encode to utf8 (default)
|
| ``-to_utf8`` | encode to utf8 (default)
|
||||||
| ``-unchars`` | unlexer that puts no spaces between tokens
|
| ``-unchars`` | unlexer that puts no spaces between tokens
|
||||||
| ``-unlexcode`` | code-like unlexer
|
| ``-unlexcode`` | code-like unlexer
|
||||||
|
| ``-unlexgreek`` | unlexer de-normalizing ancient Greek accentuation
|
||||||
| ``-unlexmixed`` | mixture of text and code (code between $...$, \(...)\, \[...\])
|
| ``-unlexmixed`` | mixture of text and code (code between $...$, \(...)\, \[...\])
|
||||||
| ``-unlextext`` | text-like unlexer
|
| ``-unlextext`` | text-like unlexer
|
||||||
| ``-unwords`` | unlexer that puts a single space between tokens (default)
|
| ``-unwords`` | unlexer that puts a single space between tokens (default)
|
||||||
@@ -513,6 +533,7 @@ trees where a function node is a metavariable.
|
|||||||
| ``-from_amharic`` | from unicode to GF Amharic transliteration
|
| ``-from_amharic`` | from unicode to GF Amharic transliteration
|
||||||
| ``-from_ancientgreek`` | from unicode to GF ancient Greek transliteration
|
| ``-from_ancientgreek`` | from unicode to GF ancient Greek transliteration
|
||||||
| ``-from_arabic`` | from unicode to GF Arabic transliteration
|
| ``-from_arabic`` | from unicode to GF Arabic transliteration
|
||||||
|
| ``-from_arabic_unvocalized`` | from unicode to GF unvocalized Arabic transliteration
|
||||||
| ``-from_cp1251`` | decode from cp1251 (Cyrillic used in Bulgarian resource)
|
| ``-from_cp1251`` | decode from cp1251 (Cyrillic used in Bulgarian resource)
|
||||||
| ``-from_devanagari`` | from unicode to GF Devanagari transliteration
|
| ``-from_devanagari`` | from unicode to GF Devanagari transliteration
|
||||||
| ``-from_greek`` | from unicode to GF modern Greek transliteration
|
| ``-from_greek`` | from unicode to GF modern Greek transliteration
|
||||||
@@ -526,11 +547,14 @@ trees where a function node is a metavariable.
|
|||||||
| ``-from_urdu`` | from unicode to GF Urdu transliteration
|
| ``-from_urdu`` | from unicode to GF Urdu transliteration
|
||||||
| ``-from_utf8`` | decode from utf8 (default)
|
| ``-from_utf8`` | decode from utf8 (default)
|
||||||
| ``-lexcode`` | code-like lexer
|
| ``-lexcode`` | code-like lexer
|
||||||
|
| ``-lexgreek`` | lexer normalizing ancient Greek accentuation
|
||||||
|
| ``-lexgreek2`` | lexer normalizing ancient Greek accentuation for text with vowel length annotations
|
||||||
| ``-lexmixed`` | mixture of text and code, as in LaTeX (code between $...$, \(...)\, \[...\])
|
| ``-lexmixed`` | mixture of text and code, as in LaTeX (code between $...$, \(...)\, \[...\])
|
||||||
| ``-lextext`` | text-like lexer
|
| ``-lextext`` | text-like lexer
|
||||||
| ``-to_amharic`` | from GF Amharic transliteration to unicode
|
| ``-to_amharic`` | from GF Amharic transliteration to unicode
|
||||||
| ``-to_ancientgreek`` | from GF ancient Greek transliteration to unicode
|
| ``-to_ancientgreek`` | from GF ancient Greek transliteration to unicode
|
||||||
| ``-to_arabic`` | from GF Arabic transliteration to unicode
|
| ``-to_arabic`` | from GF Arabic transliteration to unicode
|
||||||
|
| ``-to_arabic_unvocalized`` | from GF unvocalized Arabic transliteration to unicode
|
||||||
| ``-to_cp1251`` | encode to cp1251 (Cyrillic used in Bulgarian resource)
|
| ``-to_cp1251`` | encode to cp1251 (Cyrillic used in Bulgarian resource)
|
||||||
| ``-to_devanagari`` | from GF Devanagari transliteration to unicode
|
| ``-to_devanagari`` | from GF Devanagari transliteration to unicode
|
||||||
| ``-to_greek`` | from GF modern Greek transliteration to unicode
|
| ``-to_greek`` | from GF modern Greek transliteration to unicode
|
||||||
@@ -546,6 +570,7 @@ trees where a function node is a metavariable.
|
|||||||
| ``-to_utf8`` | encode to utf8 (default)
|
| ``-to_utf8`` | encode to utf8 (default)
|
||||||
| ``-unchars`` | unlexer that puts no spaces between tokens
|
| ``-unchars`` | unlexer that puts no spaces between tokens
|
||||||
| ``-unlexcode`` | code-like unlexer
|
| ``-unlexcode`` | code-like unlexer
|
||||||
|
| ``-unlexgreek`` | unlexer de-normalizing ancient Greek accentuation
|
||||||
| ``-unlexmixed`` | mixture of text and code (code between $...$, \(...)\, \[...\])
|
| ``-unlexmixed`` | mixture of text and code (code between $...$, \(...)\, \[...\])
|
||||||
| ``-unlextext`` | text-like unlexer
|
| ``-unlextext`` | text-like unlexer
|
||||||
| ``-unwords`` | unlexer that puts a single space between tokens (default)
|
| ``-unwords`` | unlexer that puts a single space between tokens (default)
|
||||||
@@ -666,10 +691,9 @@ command (flag -printer):
|
|||||||
fa finite automaton in graphviz format
|
fa finite automaton in graphviz format
|
||||||
gsl Nuance speech recognition format
|
gsl Nuance speech recognition format
|
||||||
haskell Haskell (abstract syntax)
|
haskell Haskell (abstract syntax)
|
||||||
|
java Java (abstract syntax)
|
||||||
js JavaScript (whole grammar)
|
js JavaScript (whole grammar)
|
||||||
jsgf JSGF speech recognition format
|
jsgf JSGF speech recognition format
|
||||||
lambda_prolog LambdaProlog (abstract syntax)
|
|
||||||
lp_byte_code Bytecode for Teyjus (abstract syntax, experimental)
|
|
||||||
pgf_pretty human-readable pgf
|
pgf_pretty human-readable pgf
|
||||||
prolog Prolog (whole grammar)
|
prolog Prolog (whole grammar)
|
||||||
python Python (whole grammar)
|
python Python (whole grammar)
|
||||||
@@ -753,6 +777,7 @@ To see transliteration tables, use command ut.
|
|||||||
| ``-from_amharic`` | from unicode to GF Amharic transliteration
|
| ``-from_amharic`` | from unicode to GF Amharic transliteration
|
||||||
| ``-from_ancientgreek`` | from unicode to GF ancient Greek transliteration
|
| ``-from_ancientgreek`` | from unicode to GF ancient Greek transliteration
|
||||||
| ``-from_arabic`` | from unicode to GF Arabic transliteration
|
| ``-from_arabic`` | from unicode to GF Arabic transliteration
|
||||||
|
| ``-from_arabic_unvocalized`` | from unicode to GF unvocalized Arabic transliteration
|
||||||
| ``-from_cp1251`` | decode from cp1251 (Cyrillic used in Bulgarian resource)
|
| ``-from_cp1251`` | decode from cp1251 (Cyrillic used in Bulgarian resource)
|
||||||
| ``-from_devanagari`` | from unicode to GF Devanagari transliteration
|
| ``-from_devanagari`` | from unicode to GF Devanagari transliteration
|
||||||
| ``-from_greek`` | from unicode to GF modern Greek transliteration
|
| ``-from_greek`` | from unicode to GF modern Greek transliteration
|
||||||
@@ -766,11 +791,14 @@ To see transliteration tables, use command ut.
|
|||||||
| ``-from_urdu`` | from unicode to GF Urdu transliteration
|
| ``-from_urdu`` | from unicode to GF Urdu transliteration
|
||||||
| ``-from_utf8`` | decode from utf8 (default)
|
| ``-from_utf8`` | decode from utf8 (default)
|
||||||
| ``-lexcode`` | code-like lexer
|
| ``-lexcode`` | code-like lexer
|
||||||
|
| ``-lexgreek`` | lexer normalizing ancient Greek accentuation
|
||||||
|
| ``-lexgreek2`` | lexer normalizing ancient Greek accentuation for text with vowel length annotations
|
||||||
| ``-lexmixed`` | mixture of text and code, as in LaTeX (code between $...$, \(...)\, \[...\])
|
| ``-lexmixed`` | mixture of text and code, as in LaTeX (code between $...$, \(...)\, \[...\])
|
||||||
| ``-lextext`` | text-like lexer
|
| ``-lextext`` | text-like lexer
|
||||||
| ``-to_amharic`` | from GF Amharic transliteration to unicode
|
| ``-to_amharic`` | from GF Amharic transliteration to unicode
|
||||||
| ``-to_ancientgreek`` | from GF ancient Greek transliteration to unicode
|
| ``-to_ancientgreek`` | from GF ancient Greek transliteration to unicode
|
||||||
| ``-to_arabic`` | from GF Arabic transliteration to unicode
|
| ``-to_arabic`` | from GF Arabic transliteration to unicode
|
||||||
|
| ``-to_arabic_unvocalized`` | from GF unvocalized Arabic transliteration to unicode
|
||||||
| ``-to_cp1251`` | encode to cp1251 (Cyrillic used in Bulgarian resource)
|
| ``-to_cp1251`` | encode to cp1251 (Cyrillic used in Bulgarian resource)
|
||||||
| ``-to_devanagari`` | from GF Devanagari transliteration to unicode
|
| ``-to_devanagari`` | from GF Devanagari transliteration to unicode
|
||||||
| ``-to_greek`` | from GF modern Greek transliteration to unicode
|
| ``-to_greek`` | from GF modern Greek transliteration to unicode
|
||||||
@@ -786,6 +814,7 @@ To see transliteration tables, use command ut.
|
|||||||
| ``-to_utf8`` | encode to utf8 (default)
|
| ``-to_utf8`` | encode to utf8 (default)
|
||||||
| ``-unchars`` | unlexer that puts no spaces between tokens
|
| ``-unchars`` | unlexer that puts no spaces between tokens
|
||||||
| ``-unlexcode`` | code-like unlexer
|
| ``-unlexcode`` | code-like unlexer
|
||||||
|
| ``-unlexgreek`` | unlexer de-normalizing ancient Greek accentuation
|
||||||
| ``-unlexmixed`` | mixture of text and code (code between $...$, \(...)\, \[...\])
|
| ``-unlexmixed`` | mixture of text and code (code between $...$, \(...)\, \[...\])
|
||||||
| ``-unlextext`` | text-like unlexer
|
| ``-unlextext`` | text-like unlexer
|
||||||
| ``-unwords`` | unlexer that puts a single space between tokens (default)
|
| ``-unwords`` | unlexer that puts a single space between tokens (default)
|
||||||
@@ -799,13 +828,14 @@ To see transliteration tables, use command ut.
|
|||||||
|
|
||||||
- Examples:
|
- Examples:
|
||||||
|
|
||||||
| ``l (EAdd 3 4) | ps -code`` | linearize code-like output
|
| ``l (EAdd 3 4) | ps -unlexcode`` | linearize code-like output
|
||||||
| ``ps -lexer=code | p -cat=Exp`` | parse code-like input
|
| ``ps -lexcode | p -cat=Exp`` | parse code-like input
|
||||||
| ``gr -cat=QCl | l | ps -bind`` | linearization output from LangFin
|
| ``gr -cat=QCl | l | ps -bind`` | linearization output from LangFin
|
||||||
| ``ps -to_devanagari "A-p"`` | show Devanagari in UTF8 terminal
|
| ``ps -to_devanagari "A-p"`` | show Devanagari in UTF8 terminal
|
||||||
| ``rf -file=Hin.gf | ps -env=quotes -to_devanagari`` | convert translit to UTF8
|
| ``rf -file=Hin.gf | ps -env=quotes -to_devanagari`` | convert translit to UTF8
|
||||||
| ``rf -file=Ara.gf | ps -from_utf8 -env=quotes -from_arabic`` | convert UTF8 to transliteration
|
| ``rf -file=Ara.gf | ps -from_utf8 -env=quotes -from_arabic`` | convert UTF8 to transliteration
|
||||||
| ``ps -to=chinese.trans "abc"`` | apply transliteration defined in file chinese.trans
|
| ``ps -to=chinese.trans "abc"`` | apply transliteration defined in file chinese.trans
|
||||||
|
| ``ps -lexgreek "a)gavoi` a)'nvrwpoi' tines*"`` | normalize ancient greek accentuation
|
||||||
|
|
||||||
|
|
||||||
#NORMAL
|
#NORMAL
|
||||||
@@ -828,7 +858,6 @@ are type checking and semantic computation.
|
|||||||
- Options:
|
- Options:
|
||||||
|
|
||||||
| ``-compute`` | compute by using semantic definitions (def)
|
| ``-compute`` | compute by using semantic definitions (def)
|
||||||
| ``-paraphrase`` | paraphrase by using semantic definitions (def)
|
|
||||||
| ``-largest`` | sort trees from largest to smallest, in number of nodes
|
| ``-largest`` | sort trees from largest to smallest, in number of nodes
|
||||||
| ``-nub`` | remove duplicate trees
|
| ``-nub`` | remove duplicate trees
|
||||||
| ``-smallest`` | sort trees from smallest to largest, in number of nodes
|
| ``-smallest`` | sort trees from smallest to largest, in number of nodes
|
||||||
@@ -838,12 +867,10 @@ are type checking and semantic computation.
|
|||||||
- Flags:
|
- Flags:
|
||||||
|
|
||||||
| ``-number`` | take at most this many trees
|
| ``-number`` | take at most this many trees
|
||||||
| ``-transfer`` | syntactic transfer by applying function, recursively in subtrees
|
|
||||||
|
|
||||||
- Examples:
|
- Examples:
|
||||||
|
|
||||||
| ``pt -compute (plus one two)`` | compute value
|
| ``pt -compute (plus one two)`` | compute value
|
||||||
| ``p "4 dogs love 5 cats" | pt -transfer=digits2numeral | l`` | four...five...
|
|
||||||
|
|
||||||
|
|
||||||
#NORMAL
|
#NORMAL
|
||||||
@@ -990,8 +1017,6 @@ This command requires a source grammar to be in scope, imported with 'import -re
|
|||||||
The operations include the parameter constructors that are in scope.
|
The operations include the parameter constructors that are in scope.
|
||||||
The optional TYPE filters according to the value type.
|
The optional TYPE filters according to the value type.
|
||||||
The grep STRINGs filter according to other substrings of the type signatures.
|
The grep STRINGs filter according to other substrings of the type signatures.
|
||||||
This command must be a line of its own, and thus cannot be a part
|
|
||||||
of a pipe.
|
|
||||||
|
|
||||||
- Syntax: ``so (-grep=STRING)* TYPE?``
|
- Syntax: ``so (-grep=STRING)* TYPE?``
|
||||||
- Options:
|
- Options:
|
||||||
@@ -1002,6 +1027,12 @@ of a pipe.
|
|||||||
|
|
||||||
| ``-grep`` | substring used for filtering (the command can have many of these)
|
| ``-grep`` | substring used for filtering (the command can have many of these)
|
||||||
|
|
||||||
|
- Examples:
|
||||||
|
|
||||||
|
| ``so Det`` | show all opers that create a Det
|
||||||
|
| ``so -grep=Prep`` | find opers relating to Prep
|
||||||
|
| ``so | wf -file=/tmp/opers`` | write the list of opers to a file
|
||||||
|
|
||||||
|
|
||||||
#NORMAL
|
#NORMAL
|
||||||
|
|
||||||
@@ -1113,6 +1144,7 @@ This command must be a line of its own, and thus cannot be a part of a pipe.
|
|||||||
| ``-amharic`` | Amharic
|
| ``-amharic`` | Amharic
|
||||||
| ``-ancientgreek`` | ancient Greek
|
| ``-ancientgreek`` | ancient Greek
|
||||||
| ``-arabic`` | Arabic
|
| ``-arabic`` | Arabic
|
||||||
|
| ``-arabic_unvocalized`` | unvocalized Arabic
|
||||||
| ``-devanagari`` | Devanagari
|
| ``-devanagari`` | Devanagari
|
||||||
| ``-greek`` | modern Greek
|
| ``-greek`` | modern Greek
|
||||||
| ``-hebrew`` | unvocalized Hebrew
|
| ``-hebrew`` | unvocalized Hebrew
|
||||||
@@ -1137,35 +1169,41 @@ This command must be a line of its own, and thus cannot be a part of a pipe.
|
|||||||
#TINY
|
#TINY
|
||||||
|
|
||||||
Prints a dependency tree in the .dot format (the graphviz format, default)
|
Prints a dependency tree in the .dot format (the graphviz format, default)
|
||||||
|
or LaTeX (flag -output=latex)
|
||||||
or the CoNLL/MaltParser format (flag -output=conll for training, malt_input
|
or the CoNLL/MaltParser format (flag -output=conll for training, malt_input
|
||||||
for unanalysed input).
|
for unanalysed input).
|
||||||
By default, the last argument is the head of every abstract syntax
|
By default, the last argument is the head of every abstract syntax
|
||||||
function; moreover, the head depends on the head of the function above.
|
function; moreover, the head depends on the head of the function above.
|
||||||
The graph can be saved in a file by the wf command as usual.
|
The graph can be saved in a file by the wf command as usual.
|
||||||
If the -view flag is defined, the graph is saved in a temporary file
|
If the -view flag is defined, the graph is saved in a temporary file
|
||||||
which is processed by graphviz and displayed by the program indicated
|
which is processed by dot (graphviz) and displayed by the program indicated
|
||||||
by the flag. The target format is png, unless overridden by the
|
by the view flag. The target format is png, unless overridden by the
|
||||||
flag -format.
|
flag -format. Results from multiple trees are combined to pdf with convert (ImageMagick).
|
||||||
|
See also 'vp -showdep' for another visualization of dependencies.
|
||||||
|
|
||||||
|
|
||||||
- Options:
|
- Options:
|
||||||
|
|
||||||
| ``-v`` | show extra information
|
| ``-v`` | show extra information
|
||||||
|
| ``-conll2latex`` | convert conll to latex
|
||||||
|
|
||||||
- Flags:
|
- Flags:
|
||||||
|
|
||||||
| ``-file`` | configuration file for labels per fun, format 'fun l1 ... label ... l2'
|
| ``-abslabels`` | abstract configuration file for labels, format per line 'fun label*'
|
||||||
| ``-format`` | format of the visualization file (default "png")
|
| ``-cnclabels`` | concrete configuration file for labels, format per line 'fun {words|*} pos label head'
|
||||||
| ``-output`` | output format of graph source (default "dot")
|
| ``-file`` | same as abslabels (abstract configuration file)
|
||||||
| ``-view`` | program to open the resulting file (default "open")
|
| ``-format`` | format of the visualization file using dot (default "png")
|
||||||
|
| ``-output`` | output format of graph source (latex, conll, dot (default but deprecated))
|
||||||
|
| ``-view`` | program to open the resulting graph file (default "open")
|
||||||
| ``-lang`` | the language of analysis
|
| ``-lang`` | the language of analysis
|
||||||
|
|
||||||
- Examples:
|
- Examples:
|
||||||
|
|
||||||
| ``gr | vd`` | generate a tree and show dependency tree in .dot
|
| ``gr | vd`` | generate a tree and show dependency tree in .dot
|
||||||
| ``gr | vd -view=open`` | generate a tree and display dependency tree on a Mac
|
| ``gr | vd -view=open`` | generate a tree and display dependency tree on with Mac's 'open'
|
||||||
| ``gr -number=1000 | vd -file=dep.labels -output=malt`` | generate training treebank
|
| ``gr | vd -view=open -output=latex`` | generate a tree and display latex dependency tree with Mac's 'open'
|
||||||
| ``gr -number=100 | vd -file=dep.labels -output=malt_input`` | generate test sentences
|
| ``gr -number=1000 | vd -abslabels=Lang.labels -cnclabels=LangSwe.labels -output=conll`` | generate a random treebank
|
||||||
|
| ``rf -file=ex.conll | vd -conll2latex | wf -file=ex.tex`` | convert conll file to latex
|
||||||
|
|
||||||
|
|
||||||
#NORMAL
|
#NORMAL
|
||||||
@@ -1182,15 +1220,16 @@ flag -format.
|
|||||||
Prints a parse tree in the .dot format (the graphviz format).
|
Prints a parse tree in the .dot format (the graphviz format).
|
||||||
The graph can be saved in a file by the wf command as usual.
|
The graph can be saved in a file by the wf command as usual.
|
||||||
If the -view flag is defined, the graph is saved in a temporary file
|
If the -view flag is defined, the graph is saved in a temporary file
|
||||||
which is processed by graphviz and displayed by the program indicated
|
which is processed by dot (graphviz) and displayed by the program indicated
|
||||||
by the flag. The target format is png, unless overridden by the
|
by the view flag. The target format is png, unless overridden by the
|
||||||
flag -format.
|
flag -format. Results from multiple trees are combined to pdf with convert (ImageMagick).
|
||||||
|
|
||||||
|
|
||||||
- Options:
|
- Options:
|
||||||
|
|
||||||
| ``-showcat`` | show categories in the tree nodes (default)
|
| ``-showcat`` | show categories in the tree nodes (default)
|
||||||
| ``-nocat`` | don't show categories
|
| ``-nocat`` | don't show categories
|
||||||
|
| ``-showdep`` | show dependency labels
|
||||||
| ``-showfun`` | show function names in the tree nodes
|
| ``-showfun`` | show function names in the tree nodes
|
||||||
| ``-nofun`` | don't show function names (default)
|
| ``-nofun`` | don't show function names (default)
|
||||||
| ``-showleaves`` | show the leaves of the tree (default)
|
| ``-showleaves`` | show the leaves of the tree (default)
|
||||||
@@ -1198,6 +1237,8 @@ flag -format.
|
|||||||
|
|
||||||
- Flags:
|
- Flags:
|
||||||
|
|
||||||
|
| ``-lang`` | the language to visualize
|
||||||
|
| ``-file`` | configuration file for dependency labels with -deps, format per line 'fun label*'
|
||||||
| ``-format`` | format of the visualization file (default "png")
|
| ``-format`` | format of the visualization file (default "png")
|
||||||
| ``-view`` | program to open the resulting file (default "open")
|
| ``-view`` | program to open the resulting file (default "open")
|
||||||
| ``-nodefont`` | font for tree nodes (default: Times -- graphviz standard font)
|
| ``-nodefont`` | font for tree nodes (default: Times -- graphviz standard font)
|
||||||
@@ -1210,7 +1251,8 @@ flag -format.
|
|||||||
- Examples:
|
- Examples:
|
||||||
|
|
||||||
| ``p "John walks" | vp`` | generate a tree and show parse tree as .dot script
|
| ``p "John walks" | vp`` | generate a tree and show parse tree as .dot script
|
||||||
| ``gr | vp -view="open"`` | generate a tree and display parse tree on a Mac
|
| ``gr | vp -view=open`` | generate a tree and display parse tree on a Mac
|
||||||
|
| ``p "she loves us" | vp -view=open -showdep -file=uddeps.labels -nocat`` | show a visual variant of a dependency tree
|
||||||
|
|
||||||
|
|
||||||
#NORMAL
|
#NORMAL
|
||||||
@@ -1227,9 +1269,9 @@ flag -format.
|
|||||||
Prints a set of trees in the .dot format (the graphviz format).
|
Prints a set of trees in the .dot format (the graphviz format).
|
||||||
The graph can be saved in a file by the wf command as usual.
|
The graph can be saved in a file by the wf command as usual.
|
||||||
If the -view flag is defined, the graph is saved in a temporary file
|
If the -view flag is defined, the graph is saved in a temporary file
|
||||||
which is processed by graphviz and displayed by the program indicated
|
which is processed by dot (graphviz) and displayed by the command indicated
|
||||||
by the flag. The target format is postscript, unless overridden by the
|
by the view flag. The target format is postscript, unless overridden by the
|
||||||
flag -format.
|
flag -format. Results from multiple trees are combined to pdf with convert (ImageMagick).
|
||||||
With option -mk, use for showing library style function names of form 'mkC'.
|
With option -mk, use for showing library style function names of form 'mkC'.
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Binary file not shown.
@@ -1,132 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
|
|
||||||
<head>
|
|
||||||
<title>GF People</title>
|
|
||||||
<meta charset="UTF-8">
|
|
||||||
<link rel=stylesheet href="../css/style.css">
|
|
||||||
<meta name = "viewport" content = "width = device-width">
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
|
|
||||||
<center>
|
|
||||||
<IMG SRC="Logos/gf0.png" alt="[GF]">
|
|
||||||
|
|
||||||
<h1>Grammatical Framework: Authors and Acknowledgements</h1>
|
|
||||||
|
|
||||||
</center>
|
|
||||||
|
|
||||||
The current developers and maintainers are
|
|
||||||
<a href="http://www.chalmers.se/cse/EN/organization/divisions/computing-science/people/angelov-krasimir">Krasimir Angelov</a>,
|
|
||||||
<a href="http://www.cse.chalmers.se/~hallgren/">Thomas Hallgren</a>,
|
|
||||||
and
|
|
||||||
<a href="http://www.cse.chalmers.se/~aarne/">Aarne Ranta</a>. Bug reports should be
|
|
||||||
posted via the
|
|
||||||
<a href="http://code.google.com/p/grammatical-framework/issues/list">GF bug tracker</a>.
|
|
||||||
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
Also the following people have contributed code to some of the versions:
|
|
||||||
|
|
||||||
<dl>
|
|
||||||
|
|
||||||
<dt>Grégoire Détrez (University of Gothenburg)
|
|
||||||
<dt>Ramona Enache (University of Gothenburg)
|
|
||||||
<dt>
|
|
||||||
<a href="http://www.cse.chalmers.se/alumni/bringert">Björn Bringert</a> (University of Gothenburg)
|
|
||||||
<dt>
|
|
||||||
Håkan Burden (University of Gothenburg)
|
|
||||||
<dt>
|
|
||||||
Hans-Joachim Daniels (Karlsruhe)
|
|
||||||
<dt>
|
|
||||||
<a href="http://www.cs.chalmers.se/~markus">Markus Forsberg</a> (Chalmers)
|
|
||||||
<dt>
|
|
||||||
<a href="http://www.cs.chalmers.se/~krijo">Kristofer Johannisson</a> (University of Gothenburg)
|
|
||||||
<dt>
|
|
||||||
<a href="http://www.cs.chalmers.se/~janna">Janna Khegai</a> (Chalmers)
|
|
||||||
<dt>
|
|
||||||
<a href="http://www.cse.chalmers.se/~peb">Peter Ljunglöf</a> (University of Gothenburg)
|
|
||||||
<dt>
|
|
||||||
Petri Mäenpää (Nokia)
|
|
||||||
</dl>
|
|
||||||
|
|
||||||
|
|
||||||
At least the following colleagues are thanked for suggestions,
|
|
||||||
bug reports, and other indirect contributions to the code. (Notice:
|
|
||||||
these are early contributors - the list has not been updated since 2004 or so).
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
<a href="http://www.di.unito.it/~stefano/">Stefano Berardi</a> (Torino),
|
|
||||||
|
|
||||||
Pascal Boldini (Paris),
|
|
||||||
|
|
||||||
<a href="http://www.dur.ac.uk/~dcs0pcc/">Paul Callaghan</a> (Durham),
|
|
||||||
|
|
||||||
Lauri Carlson (Helsinki),
|
|
||||||
|
|
||||||
<a href="http://www.cse.chalmers.se/~koen">Koen Claessen</a> (Chalmers),
|
|
||||||
|
|
||||||
<a href="http://www.cling.gu.se/~cooper">Robin Cooper</a> (Gothenburg),
|
|
||||||
|
|
||||||
<a href="http://www.cse.chalmers.se/~coquand">Thierry Coquand</a> (Chalmers),
|
|
||||||
|
|
||||||
<a
|
|
||||||
href="http://www.xrce.xerox.com/people/dymetman/dymetman.html">Marc
|
|
||||||
Dymetman</a> (XRCE),
|
|
||||||
|
|
||||||
Bertrand Grégoire (Tudor Institure, Luxembourg),
|
|
||||||
|
|
||||||
<a href="http://www.cse.chalmers.se/~reiner">Reiner Hähnle</a> (Chalmers),
|
|
||||||
|
|
||||||
<a href="http://pauillac.inria.fr/~huet/">Gérard Huet</a> (INRIA),
|
|
||||||
|
|
||||||
<a href="http://www.cse.chalmers.se/~patrikj">Patrik Jansson</a> (Chalmers),
|
|
||||||
|
|
||||||
Bernard Jaulin (Paris),
|
|
||||||
|
|
||||||
<a href="http://www.xrce.xerox.com/people/karttunen/karttunen.html">
|
|
||||||
Lauri Karttunen</a> (PARC),
|
|
||||||
|
|
||||||
Matti Kinnunen (Nokia),
|
|
||||||
|
|
||||||
<a
|
|
||||||
href="http://www.xrce.xerox.com/people/lux/">Veronika
|
|
||||||
Lux</a> (XRCE),
|
|
||||||
|
|
||||||
Per Martin-Löf (Stockholm),
|
|
||||||
|
|
||||||
<a href="http://www.cse.chalmers.se/~bengt">Bengt Nordström</a> (Chalmers),
|
|
||||||
|
|
||||||
<a
|
|
||||||
href="http://www.cis.uni-muenchen.de/studenten/stud_homepages/okrslar/reklame.html">
|
|
||||||
Martin Okrslar</a> (CIS),
|
|
||||||
|
|
||||||
Jianmin Pang (Durham),
|
|
||||||
|
|
||||||
<a
|
|
||||||
href="http://www.xrce.xerox.com/people/pogodalla/index.fr.html">Sylvain
|
|
||||||
Pogodalla</a> (XRCE),
|
|
||||||
|
|
||||||
<a href="http://www.inria.fr/Loic.Pottier">Loïc Pottier</a> (INRIA),
|
|
||||||
|
|
||||||
|
|
||||||
<a href="http://www2.parc.com/istl/members/zaenen/">Annie Zaenen</a> (PARC)
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
The GF logo was designed by Uula Ranta.
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
From 2001 to 2004, GF enjoyed funding from the
|
|
||||||
<a href="http://www.vinnova.se">Vinnova</a> foundation, within the
|
|
||||||
<a href="http://www.cse.chalmers.se/research/group/Language-technology/ILT.html">
|
|
||||||
Interactive Languge Technology</a> project.
|
|
||||||
|
|
||||||
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
|
|
||||||
63
doc/gf-people.md
Normal file
63
doc/gf-people.md
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
---
|
||||||
|
title: "Grammatical Framework: Authors and Acknowledgements"
|
||||||
|
---
|
||||||
|
|
||||||
|
## Current maintainers
|
||||||
|
|
||||||
|
The current maintainers of GF are
|
||||||
|
|
||||||
|
[Krasimir Angelov](http://www.chalmers.se/cse/EN/organization/divisions/computing-science/people/angelov-krasimir),
|
||||||
|
[Thomas Hallgren](http://www.cse.chalmers.se/~hallgren/),
|
||||||
|
[Aarne Ranta](http://www.cse.chalmers.se/~aarne/),
|
||||||
|
[John J. Camilleri](http://johnjcamilleri.com), and
|
||||||
|
[Inari Listenmaa](https://inariksit.github.io/).
|
||||||
|
|
||||||
|
This page is otherwise not up to date.
|
||||||
|
For detailed data about contributors to the code repositories since 2007, see
|
||||||
|
[here (gf-core)](https://github.com/GrammaticalFramework/gf-core/graphs/contributors)
|
||||||
|
and
|
||||||
|
[here (gf-rgl)](https://github.com/GrammaticalFramework/gf-rgl/graphs/contributors).
|
||||||
|
|
||||||
|
## Previous contributors
|
||||||
|
|
||||||
|
The following people have contributed code to some of the versions:
|
||||||
|
|
||||||
|
- Grégoire Détrez (University of Gothenburg)
|
||||||
|
- Ramona Enache (University of Gothenburg)
|
||||||
|
- [Björn Bringert](http://www.cse.chalmers.se/alumni/bringert) (University of Gothenburg)
|
||||||
|
- Håkan Burden (University of Gothenburg)
|
||||||
|
- Hans-Joachim Daniels (Karlsruhe)
|
||||||
|
- [Markus Forsberg](http://www.cs.chalmers.se/~markus) (Chalmers)
|
||||||
|
- [Kristofer Johannisson](http://www.cs.chalmers.se/~krijo) (University of Gothenburg)
|
||||||
|
- [Janna Khegai](http://www.cs.chalmers.se/~janna) (Chalmers)
|
||||||
|
- [Peter Ljunglöf](http://www.cse.chalmers.se/~peb) (University of Gothenburg)
|
||||||
|
- Petri Mäenpää (Nokia)
|
||||||
|
|
||||||
|
At least the following colleagues are thanked for suggestions, bug
|
||||||
|
reports, and other indirect contributions to the code.
|
||||||
|
|
||||||
|
- [Stefano Berardi](http://www.di.unito.it/~stefano/) (Torino)
|
||||||
|
- Pascal Boldini (Paris)
|
||||||
|
- [Paul Callaghan](http://www.dur.ac.uk/~dcs0pcc/) (Durham)
|
||||||
|
- Lauri Carlson (Helsinki)
|
||||||
|
- [Koen Claessen](http://www.cse.chalmers.se/~koen) (Chalmers)
|
||||||
|
- [Robin Cooper](http://www.cling.gu.se/~cooper) (Gothenburg)
|
||||||
|
- [Thierry Coquand](http://www.cse.chalmers.se/~coquand) (Chalmers)
|
||||||
|
- [Marc Dymetman](http://www.xrce.xerox.com/people/dymetman/dymetman.html) (XRCE)
|
||||||
|
- Bertrand Grégoire (Tudor Institute, Luxembourg)
|
||||||
|
- [Reiner Hähnle](http://www.cse.chalmers.se/~reiner) (Chalmers)
|
||||||
|
- [Gérard Huet](http://pauillac.inria.fr/~huet/) (INRIA)
|
||||||
|
- [Patrik Jansson](http://www.cse.chalmers.se/~patrikj) (Chalmers)
|
||||||
|
- Bernard Jaulin (Paris)
|
||||||
|
- [Lauri Karttunen](http://www.xrce.xerox.com/people/karttunen/karttunen.html) (PARC)
|
||||||
|
- Matti Kinnunen (Nokia)
|
||||||
|
- [Veronika Lux](http://www.xrce.xerox.com/people/lux/) (XRCE)
|
||||||
|
- Per Martin-Löf (Stockholm)
|
||||||
|
- [Bengt Nordström](http://www.cse.chalmers.se/~bengt) (Chalmers)
|
||||||
|
- [Martin Okrslar](http://www.cis.uni-muenchen.de/studenten/stud_homepages/okrslar/reklame.html) (CIS)
|
||||||
|
- Jianmin Pang (Durham)
|
||||||
|
- [Sylvain Pogodalla](http://www.xrce.xerox.com/people/pogodalla/index.fr.html) (XRCE)
|
||||||
|
- [Loïc Pottier](http://www.inria.fr/Loic.Pottier) (INRIA)
|
||||||
|
- [Annie Zaenen](http://www2.parc.com/istl/members/zaenen/) (PARC)
|
||||||
|
|
||||||
|
The GF logo was designed by Uula Ranta.
|
||||||
@@ -1,158 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<title>GF Quickstart</title>
|
|
||||||
<link rel=stylesheet href="../css/style.css">
|
|
||||||
<meta name = "viewport" content = "width = device-width">
|
|
||||||
</head>
|
|
||||||
|
|
||||||
|
|
||||||
<body>
|
|
||||||
|
|
||||||
<center>
|
|
||||||
<img src="Logos/gf0.png">
|
|
||||||
<p>
|
|
||||||
Aarne Ranta
|
|
||||||
<p>
|
|
||||||
October 2011 for GF 3.3
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
<h1>Grammatical Framework Quick Start</h1>
|
|
||||||
|
|
||||||
</center>
|
|
||||||
|
|
||||||
This Quick Start shows a few examples of how GF can be used.
|
|
||||||
We assume that you have downloaded and installed GF, so that
|
|
||||||
the command <tt>gf</tt> works for you. See download and install
|
|
||||||
instructions <a href="../download/index.html">here</a>.
|
|
||||||
|
|
||||||
<h2>Want to try without downloading?</h2>
|
|
||||||
|
|
||||||
<a href="../demos/phrasebook/">Using GF translation</a> with an existing grammar.
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
<a href="../demos/gfse/">Writing GF grammars</a> in the cloud, without installing GF.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<h2>Using GF for translation and generation</h2>
|
|
||||||
|
|
||||||
When you have downloaded and installed GF:
|
|
||||||
<ol>
|
|
||||||
<li> Copy the files
|
|
||||||
<a href="../examples/tutorial/food/Food.gf"><tt>Food.gf</tt></a>,
|
|
||||||
<a href="../examples/tutorial/food/FoodEng.gf"><tt>FoodEng.gf</tt></a>, and
|
|
||||||
<a href="../examples/tutorial/food/FoodIta.gf"><tt>FoodIta.gf</tt></a>.
|
|
||||||
Or go to <tt>GF/examples/tutorial/food/</tt>, if you have downloaded the
|
|
||||||
GF sources.
|
|
||||||
|
|
||||||
<li> Start GF with the shell command (without the prompt <tt>$</tt>)
|
|
||||||
<pre>
|
|
||||||
$ gf FoodIta.gf FoodEng.gf
|
|
||||||
</pre>
|
|
||||||
Alternatively, start GF with <tt>gf</tt> and give the GF command <tt>import FoodIta.gf FoodEng.gf</tt>.
|
|
||||||
|
|
||||||
<li> <b>Translation</b>. Try your first translation by giving the GF command
|
|
||||||
<pre>
|
|
||||||
parse "this cheese is very very Italian" | linearize
|
|
||||||
</pre>
|
|
||||||
Notice that the parser accepts the tabulator for word completion.
|
|
||||||
|
|
||||||
<li> <b>Generation</b>. Random-generate sentences in two languages:
|
|
||||||
<pre>
|
|
||||||
generate_random | linearize
|
|
||||||
</pre>
|
|
||||||
|
|
||||||
<li> <b>Other commands</b>. Use the help command
|
|
||||||
<pre>
|
|
||||||
help
|
|
||||||
</pre>
|
|
||||||
<li> <b>More examples</b>. Go to <tt>GF/examples/phrasebook</tt> or some other
|
|
||||||
subdirectory of <tt>GF/examples/</tt>. Or try a resource grammar by, for instance,
|
|
||||||
<pre>
|
|
||||||
import alltenses/LangEng.gfo alltenses/LangGer.gfo
|
|
||||||
|
|
||||||
parse -lang=Eng "I love you" | linearize -treebank
|
|
||||||
</pre>
|
|
||||||
The resource grammars are found relative to the value of <tt>GF_LIB_PATH</tt>, which
|
|
||||||
you may have to set; see <a href="../download/index.html">here</a> for instructions.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
</ol>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<h2>Grammar development</h2>
|
|
||||||
|
|
||||||
Add words to the <tt>Food</tt>
|
|
||||||
grammars and try the above commands again. For instance, add the following lines:
|
|
||||||
<pre>
|
|
||||||
Bread : Kind ; -- in Food.gf
|
|
||||||
Bread = {s = "bread"} ; -- in FoodEng.gf
|
|
||||||
Bread = {s = "pane"} ; -- in FoodIta.gf
|
|
||||||
</pre>
|
|
||||||
and start GF again with the same command. Now you can even translate
|
|
||||||
<i>this bread is very Italian</i>.
|
|
||||||
</ol>
|
|
||||||
To learn more about GF commands and
|
|
||||||
grammar development, go to one of the tutorials:
|
|
||||||
<ul>
|
|
||||||
<li> <a href="tutorial/gf-tutorial.html">GF Tutorial</a>: older, more programmer-oriented
|
|
||||||
<li> <a href="gf-lrec-2010.pdf">GF Resource Tutorial</a>: newer, more linguist-oriented
|
|
||||||
</ul>
|
|
||||||
To learn about how GF is used for easily writing grammars for 16 languages, consult the
|
|
||||||
<ul>
|
|
||||||
<li> <a href="../lib/doc/synopsis.html">GF Resource Grammar Library</a>.
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<h2>Run-time grammars and web applications</h2>
|
|
||||||
|
|
||||||
GF has its own "machine language", PGF (Portable Grammar Format),
|
|
||||||
which is recommended for use in applications at run time. To produce a PGF file from
|
|
||||||
the two grammars above, do
|
|
||||||
<pre>
|
|
||||||
gf -make FoodIta.gf FoodEng.gf
|
|
||||||
wrote Food.pgf
|
|
||||||
</pre>
|
|
||||||
You can use this in Haskell and Java programs, and also on web services, such as
|
|
||||||
<ul>
|
|
||||||
<li> the
|
|
||||||
<a href="http://cloud.grammaticalframework.org/minibar/minibar.html">minibar</a>
|
|
||||||
fridge magnets
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
The quickest way to provide a GF web service is to start GF with the <tt>-server</tt> option:
|
|
||||||
<pre>
|
|
||||||
$ gf -server
|
|
||||||
This is GF version 3.3
|
|
||||||
Built on linux/i386 with ghc-7.0, flags: interrupt server cclazy
|
|
||||||
Document root = /usr/local/share/gf-3.3/www
|
|
||||||
Starting HTTP server, open http://localhost:41296/ in your web browser.
|
|
||||||
</pre>
|
|
||||||
You can view it locally by pointing your
|
|
||||||
browser to the URL shown. You can add your own <tt>.pgf</tt> grammar to the service by
|
|
||||||
copying it over to the <tt>documentRoot</tt> directory. Just push "reload" in
|
|
||||||
your browser after each such update.
|
|
||||||
|
|
||||||
<p>
|
|
||||||
|
|
||||||
To build more customized web application, consult the
|
|
||||||
<a href="http://code.google.com/p/grammatical-framework/wiki/SideBar?tm=6">developer wiki</a>.
|
|
||||||
|
|
||||||
|
|
||||||
<h2>User group</h2>
|
|
||||||
|
|
||||||
You are welcome to join the <A HREF="http://groups.google.com/group/gf-dev">User Group</A>
|
|
||||||
to get help and discuss GF-related issues!
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
</body></html>
|
|
||||||
Binary file not shown.
@@ -1,493 +0,0 @@
|
|||||||
GF Quick Reference
|
|
||||||
Aarne Ranta
|
|
||||||
April 4, 2006
|
|
||||||
|
|
||||||
% NOTE: this is a txt2tags file.
|
|
||||||
% Create an html file from this file using:
|
|
||||||
% txt2tags -thtml gf-reference.t2t
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!target:html
|
|
||||||
%!options: --toc
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
This is a quick reference on GF grammars. It aims to
|
|
||||||
cover all forms of expression available when writing
|
|
||||||
grammars. It assumes basic knowledge of GF, which
|
|
||||||
can be acquired from the
|
|
||||||
[GF Tutorial http://www.grammaticalframework.org/doc/tutorial/gf-tutorial.html].
|
|
||||||
Help on GF commands is obtained on line by the
|
|
||||||
help command (``help``), and help on invoking
|
|
||||||
GF with (``gf -help``).
|
|
||||||
|
|
||||||
|
|
||||||
===A complete example===
|
|
||||||
|
|
||||||
This is a complete example of a GF grammar divided
|
|
||||||
into three modules in files. The grammar recognizes the
|
|
||||||
phrases //one pizza// and //two pizzas//.
|
|
||||||
|
|
||||||
File ``Order.gf``:
|
|
||||||
```
|
|
||||||
abstract Order = {
|
|
||||||
cat
|
|
||||||
Order ;
|
|
||||||
Item ;
|
|
||||||
fun
|
|
||||||
One, Two : Item -> Order ;
|
|
||||||
Pizza : Item ;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
File ``OrderEng.gf`` (the top file):
|
|
||||||
```
|
|
||||||
--# -path=.:prelude
|
|
||||||
concrete OrderEng of Order =
|
|
||||||
open Res, Prelude in {
|
|
||||||
flags startcat=Order ;
|
|
||||||
lincat
|
|
||||||
Order = SS ;
|
|
||||||
Item = {s : Num => Str} ;
|
|
||||||
lin
|
|
||||||
One it = ss ("one" ++ it.s ! Sg) ;
|
|
||||||
Two it = ss ("two" ++ it.s ! Pl) ;
|
|
||||||
Pizza = regNoun "pizza" ;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
File ``Res.gf``:
|
|
||||||
```
|
|
||||||
resource Res = open Prelude in {
|
|
||||||
param Num = Sg | Pl ;
|
|
||||||
oper regNoun : Str -> {s : Num => Str} =
|
|
||||||
\dog -> {s = table {
|
|
||||||
Sg => dog ;
|
|
||||||
_ => dog + "s"
|
|
||||||
}
|
|
||||||
} ;
|
|
||||||
}
|
|
||||||
```
|
|
||||||
To use this example, do
|
|
||||||
```
|
|
||||||
% gf -- in shell: start GF
|
|
||||||
> i OrderEng.gf -- in GF: import grammar
|
|
||||||
> p "one pizza" -- parse string
|
|
||||||
> l Two Pizza -- linearize tree
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===Modules and files===
|
|
||||||
|
|
||||||
One module per file.
|
|
||||||
File named ``Foo.gf`` contains module named
|
|
||||||
``Foo``.
|
|
||||||
|
|
||||||
Each module has the structure
|
|
||||||
```
|
|
||||||
moduletypename =
|
|
||||||
Inherits ** -- optional
|
|
||||||
open Opens in -- optional
|
|
||||||
{ Judgements }
|
|
||||||
```
|
|
||||||
Inherits are names of modules of the same type.
|
|
||||||
Inheritance can be restricted:
|
|
||||||
```
|
|
||||||
Mo[f,g], -- inherit only f,g from Mo
|
|
||||||
Lo-[f,g] -- inherits all but f,g from Lo
|
|
||||||
```
|
|
||||||
Opens are possible in ``concrete`` and ``resource``.
|
|
||||||
They are names of modules of these two types, possibly
|
|
||||||
qualified:
|
|
||||||
```
|
|
||||||
(M = Mo), -- refer to f as M.f or Mo.f
|
|
||||||
(Lo = Lo) -- refer to f as Lo.f
|
|
||||||
```
|
|
||||||
Module types and judgements in them:
|
|
||||||
```
|
|
||||||
abstract A -- cat, fun, def, data
|
|
||||||
concrete C of A -- lincat, lin, lindef, printname
|
|
||||||
resource R -- param, oper
|
|
||||||
|
|
||||||
interface I -- like resource, but can have
|
|
||||||
oper f : T without definition
|
|
||||||
instance J of I -- like resource, defines opers
|
|
||||||
that I leaves undefined
|
|
||||||
incomplete -- functor: concrete that opens
|
|
||||||
concrete CI of A = one or more interfaces
|
|
||||||
open I in ...
|
|
||||||
concrete CJ of A = -- completion: concrete that
|
|
||||||
CI with instantiates a functor by
|
|
||||||
(I = J) instances of open interfaces
|
|
||||||
```
|
|
||||||
The forms
|
|
||||||
``param``, ``oper``
|
|
||||||
may appear in ``concrete`` as well, but are then
|
|
||||||
not inherited to extensions.
|
|
||||||
|
|
||||||
All modules can moreover have ``flags`` and comments.
|
|
||||||
Comments have the forms
|
|
||||||
```
|
|
||||||
-- till the end of line
|
|
||||||
{- any number of lines between -}
|
|
||||||
--# used for compiler pragmas
|
|
||||||
```
|
|
||||||
A ``concrete`` can be opened like a ``resource``.
|
|
||||||
It is translated as follows:
|
|
||||||
```
|
|
||||||
cat C ---> oper C : Type =
|
|
||||||
lincat C = T T ** {lock_C : {}}
|
|
||||||
|
|
||||||
fun f : G -> C ---> oper f : A* -> C* = \g ->
|
|
||||||
lin f = t t g ** {lock_C = <>}
|
|
||||||
```
|
|
||||||
An ``abstract`` can be opened like an ``interface``.
|
|
||||||
Any ``concrete`` of it then works as an ``instance``.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===Judgements===
|
|
||||||
|
|
||||||
```
|
|
||||||
cat C -- declare category C
|
|
||||||
cat C (x:A)(y:B x) -- dependent category C
|
|
||||||
cat C A B -- same as C (x : A)(y : B)
|
|
||||||
fun f : T -- declare function f of type T
|
|
||||||
def f = t -- define f as t
|
|
||||||
def f p q = t -- define f by pattern matching
|
|
||||||
data C = f | g -- set f,g as constructors of C
|
|
||||||
data f : A -> C -- same as
|
|
||||||
fun f : A -> C; data C=f
|
|
||||||
|
|
||||||
lincat C = T -- define lin.type of cat C
|
|
||||||
lin f = t -- define lin. of fun f
|
|
||||||
lin f x y = t -- same as lin f = \x y -> t
|
|
||||||
lindef C = \s -> t -- default lin. of cat C
|
|
||||||
printname fun f = s -- printname shown in menus
|
|
||||||
printname cat C = s -- printname shown in menus
|
|
||||||
printname f = s -- same as printname fun f = s
|
|
||||||
|
|
||||||
param P = C | D Q R -- define parameter type P
|
|
||||||
with constructors
|
|
||||||
C : P, D : Q -> R -> P
|
|
||||||
oper h : T = t -- define oper h of type T
|
|
||||||
oper h = t -- omit type, if inferrable
|
|
||||||
|
|
||||||
flags p=v -- set value of flag p
|
|
||||||
```
|
|
||||||
Judgements are terminated by semicolons (``;``).
|
|
||||||
Subsequent judgments of the same form may share the
|
|
||||||
keyword:
|
|
||||||
```
|
|
||||||
cat C ; D ; -- same as cat C ; cat D ;
|
|
||||||
```
|
|
||||||
Judgements can also share RHS:
|
|
||||||
```
|
|
||||||
fun f,g : A -- same as fun f : A ; g : A
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
===Types===
|
|
||||||
|
|
||||||
Abstract syntax (in ``fun``):
|
|
||||||
```
|
|
||||||
C -- basic type, if cat C
|
|
||||||
C a b -- basic type for dep. category
|
|
||||||
(x : A) -> B -- dep. functions from A to B
|
|
||||||
(_ : A) -> B -- nondep. functions from A to B
|
|
||||||
(p,q : A) -> B -- same as (p : A)-> (q : A) -> B
|
|
||||||
A -> B -- same as (_ : A) -> B
|
|
||||||
Int -- predefined integer type
|
|
||||||
Float -- predefined float type
|
|
||||||
String -- predefined string type
|
|
||||||
```
|
|
||||||
Concrete syntax (in ``lincat``):
|
|
||||||
```
|
|
||||||
Str -- token lists
|
|
||||||
P -- parameter type, if param P
|
|
||||||
P => B -- table type, if P param. type
|
|
||||||
{s : Str ; p : P}-- record type
|
|
||||||
{s,t : Str} -- same as {s : Str ; t : Str}
|
|
||||||
{a : A} **{b : B}-- record type extension, same as
|
|
||||||
{a : A ; b : B}
|
|
||||||
A * B * C -- tuple type, same as
|
|
||||||
{p1 : A ; p2 : B ; p3 : C}
|
|
||||||
Ints n -- type of n first integers
|
|
||||||
```
|
|
||||||
Resource (in ``oper``): all those of concrete, plus
|
|
||||||
```
|
|
||||||
Tok -- tokens (subtype of Str)
|
|
||||||
A -> B -- functions from A to B
|
|
||||||
Int -- integers
|
|
||||||
Strs -- list of prefixes (for pre)
|
|
||||||
PType -- parameter type
|
|
||||||
Type -- any type
|
|
||||||
```
|
|
||||||
As parameter types, one can use any finite type:
|
|
||||||
``P`` defined in ``param P``,
|
|
||||||
``Ints n``, and record types of parameter types.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===Expressions===
|
|
||||||
|
|
||||||
Syntax trees = full function applications
|
|
||||||
```
|
|
||||||
f a b -- : C if fun f : A -> B -> C
|
|
||||||
1977 -- : Int
|
|
||||||
3.14 -- : Float
|
|
||||||
"foo" -- : String
|
|
||||||
```
|
|
||||||
Higher-Order Abstract syntax (HOAS): functions as arguments:
|
|
||||||
```
|
|
||||||
F a (\x -> c) -- : C if a : A, c : C (x : B),
|
|
||||||
fun F : A -> (B -> C) -> C
|
|
||||||
```
|
|
||||||
Tokens and token lists
|
|
||||||
```
|
|
||||||
"hello" -- : Tok, singleton Str
|
|
||||||
"hello" ++ "world" -- : Str
|
|
||||||
["hello world"] -- : Str, same as "hello" ++ "world"
|
|
||||||
"hello" + "world" -- : Tok, computes to "helloworld"
|
|
||||||
[] -- : Str, empty list
|
|
||||||
```
|
|
||||||
Parameters
|
|
||||||
```
|
|
||||||
Sg -- atomic constructor
|
|
||||||
VPres Sg P2 -- applied constructor
|
|
||||||
{n = Sg ; p = P3} -- record of parameters
|
|
||||||
```
|
|
||||||
Tables
|
|
||||||
```
|
|
||||||
table { -- by full branches
|
|
||||||
Sg => "mouse" ;
|
|
||||||
Pl => "mice"
|
|
||||||
}
|
|
||||||
table { -- by pattern matching
|
|
||||||
Pl => "mice" ;
|
|
||||||
_ => "mouse" -- wildcard pattern
|
|
||||||
}
|
|
||||||
table {
|
|
||||||
n => regn n "cat" -- variable pattern
|
|
||||||
}
|
|
||||||
table Num {...} -- table given with arg. type
|
|
||||||
table ["ox"; "oxen"] -- table as course of values
|
|
||||||
\\_ => "fish" -- same as table {_ => "fish"}
|
|
||||||
\\p,q => t -- same as \\p => \\q => t
|
|
||||||
|
|
||||||
t ! p -- select p from table t
|
|
||||||
case e of {...} -- same as table {...} ! e
|
|
||||||
```
|
|
||||||
Records
|
|
||||||
```
|
|
||||||
{s = "Liz"; g = Fem} -- record in full form
|
|
||||||
{s,t = "et"} -- same as {s = "et";t= "et"}
|
|
||||||
{s = "Liz"} ** -- record extension: same as
|
|
||||||
{g = Fem} {s = "Liz" ; g = Fem}
|
|
||||||
|
|
||||||
<a,b,c> -- tuple, same as {p1=a;p2=b;p3=c}
|
|
||||||
```
|
|
||||||
Functions
|
|
||||||
```
|
|
||||||
\x -> t -- lambda abstract
|
|
||||||
\x,y -> t -- same as \x -> \y -> t
|
|
||||||
\x,_ -> t -- binding not in t
|
|
||||||
```
|
|
||||||
Local definitions
|
|
||||||
```
|
|
||||||
let x : A = d in t -- let definition
|
|
||||||
let x = d in t -- let defin, type inferred
|
|
||||||
let x=d ; y=e in t -- same as
|
|
||||||
let x=d in let y=e in t
|
|
||||||
let {...} in t -- same as let ... in t
|
|
||||||
|
|
||||||
t where {...} -- same as let ... in t
|
|
||||||
```
|
|
||||||
Free variation
|
|
||||||
```
|
|
||||||
variants {x ; y} -- both x and y possible
|
|
||||||
variants {} -- nothing possible
|
|
||||||
```
|
|
||||||
Prefix-dependent choices
|
|
||||||
```
|
|
||||||
pre {"a" ; "an" / v} -- "an" before v, "a" otherw.
|
|
||||||
strs {"a" ; "i" ;"o"}-- list of condition prefixes
|
|
||||||
```
|
|
||||||
Typed expression
|
|
||||||
```
|
|
||||||
<t:T> -- same as t, to help type inference
|
|
||||||
```
|
|
||||||
Accessing bound variables in ``lin``: use fields ``$1, $2, $3,...``.
|
|
||||||
Example:
|
|
||||||
```
|
|
||||||
fun F : (A : Set) -> (El A -> Prop) -> Prop ;
|
|
||||||
lin F A B = {s = ["for all"] ++ A.s ++ B.$1 ++ B.s}
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
===Pattern matching===
|
|
||||||
|
|
||||||
These patterns can be used in branches of ``table`` and
|
|
||||||
``case`` expressions. Patterns are matched in the order in
|
|
||||||
which they appear in the grammar.
|
|
||||||
```
|
|
||||||
C -- atomic param constructor
|
|
||||||
C p q -- param constr. applied to patterns
|
|
||||||
x -- variable, matches anything
|
|
||||||
_ -- wildcard, matches anything
|
|
||||||
"foo" -- string
|
|
||||||
56 -- integer
|
|
||||||
{s = p ; y = q} -- record, matches extensions too
|
|
||||||
<p,q> -- tuple, same as {p1=p ; p2=q}
|
|
||||||
p | q -- disjunction, binds to first match
|
|
||||||
x@p -- binds x to what p matches
|
|
||||||
- p -- negation
|
|
||||||
p + "s" -- sequence of two string patterns
|
|
||||||
p* -- repetition of a string pattern
|
|
||||||
```
|
|
||||||
|
|
||||||
===Sample library functions===
|
|
||||||
|
|
||||||
```
|
|
||||||
-- lib/prelude/Predef.gf
|
|
||||||
drop : Int -> Tok -> Tok -- drop prefix of length
|
|
||||||
take : Int -> Tok -> Tok -- take prefix of length
|
|
||||||
tk : Int -> Tok -> Tok -- drop suffix of length
|
|
||||||
dp : Int -> Tok -> Tok -- take suffix of length
|
|
||||||
occur : Tok -> Tok -> PBool -- test if substring
|
|
||||||
occurs : Tok -> Tok -> PBool -- test if any char occurs
|
|
||||||
show : (P:Type) -> P ->Tok -- param to string
|
|
||||||
read : (P:Type) -> Tok-> P -- string to param
|
|
||||||
toStr : (L:Type) -> L ->Str -- find "first" string
|
|
||||||
|
|
||||||
-- lib/prelude/Prelude.gf
|
|
||||||
param Bool = True | False
|
|
||||||
oper
|
|
||||||
SS : Type -- the type {s : Str}
|
|
||||||
ss : Str -> SS -- construct SS
|
|
||||||
cc2 : (_,_ : SS) -> SS -- concat SS's
|
|
||||||
optStr : Str -> Str -- string or empty
|
|
||||||
strOpt : Str -> Str -- empty or string
|
|
||||||
bothWays : Str -> Str -> Str -- X++Y or Y++X
|
|
||||||
init : Tok -> Tok -- all but last char
|
|
||||||
last : Tok -> Tok -- last char
|
|
||||||
prefixSS : Str -> SS -> SS
|
|
||||||
postfixSS : Str -> SS -> SS
|
|
||||||
infixSS : Str -> SS -> SS -> SS
|
|
||||||
if_then_else : (A : Type) -> Bool -> A -> A -> A
|
|
||||||
if_then_Str : Bool -> Str -> Str -> Str
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
===Flags===
|
|
||||||
|
|
||||||
Flags can appear, with growing priority,
|
|
||||||
- in files, judgement ``flags`` and without dash (``-``)
|
|
||||||
- as flags to ``gf`` when invoked, with dash
|
|
||||||
- as flags to various GF commands, with dash
|
|
||||||
|
|
||||||
|
|
||||||
Some common flags used in grammars:
|
|
||||||
```
|
|
||||||
startcat=cat use this category as default
|
|
||||||
|
|
||||||
lexer=literals int and string literals recognized
|
|
||||||
lexer=code like program code
|
|
||||||
lexer=text like text: spacing, capitals
|
|
||||||
lexer=textlit text, unknowns as string lits
|
|
||||||
|
|
||||||
unlexer=code like program code
|
|
||||||
unlexer=codelit code, remove string lit quotes
|
|
||||||
unlexer=text like text: punctuation, capitals
|
|
||||||
unlexer=textlit text, remove string lit quotes
|
|
||||||
unlexer=concat remove all spaces
|
|
||||||
unlexer=bind remove spaces around "&+"
|
|
||||||
|
|
||||||
optimize=all_subs best for almost any concrete
|
|
||||||
optimize=values good for lexicon concrete
|
|
||||||
optimize=all usually good for resource
|
|
||||||
optimize=noexpand for resource, if =all too big
|
|
||||||
```
|
|
||||||
For the full set of values for ``FLAG``,
|
|
||||||
use on-line ``h -FLAG``.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
===File import search paths===
|
|
||||||
|
|
||||||
Colon-separated list of directories searched in the
|
|
||||||
given order:
|
|
||||||
```
|
|
||||||
--# -path=.:../abstract:../common:prelude
|
|
||||||
```
|
|
||||||
This can be (in order of increasing priority), as
|
|
||||||
first line in the file, as flag to ``gf``
|
|
||||||
when invoked, or as flag to the ``i`` command.
|
|
||||||
The prefix ``--#`` is used only in files.
|
|
||||||
|
|
||||||
GF attempts to satisfy an ``import`` command by searching for the
|
|
||||||
import filename in the above search paths, initially qualified
|
|
||||||
relative to the current working directory. If the file is not found in
|
|
||||||
that initial expansion, the search paths are re-qualified relative to
|
|
||||||
the directories given in the ``GF_LIB_PATH`` environment variable. If
|
|
||||||
``GF_LIB_PATH`` is not defined, its default value is
|
|
||||||
``/usr/local/share/gf-3.9/lib`` (assuming you have GF version 3.9).
|
|
||||||
|
|
||||||
If your GF resource grammar libraries are installed somewhere else,
|
|
||||||
you will want to set ``GF_LIB_PATH`` to point there instead. In a
|
|
||||||
pinch, you can point to the ``GF/lib/src/`` folder in your clone of
|
|
||||||
the GF source code repository.
|
|
||||||
|
|
||||||
Developers of resource grammars may find it useful to define multiple
|
|
||||||
directories, colon-separated, in ``GF_LIB_PATH``.
|
|
||||||
|
|
||||||
|
|
||||||
===Alternative grammar formats===
|
|
||||||
|
|
||||||
**Old GF** (before GF 2.0):
|
|
||||||
all judgements in any kinds of modules,
|
|
||||||
division into files uses ``include``s.
|
|
||||||
A file ``Foo.gf`` is recognized as the old format
|
|
||||||
if it lacks a module header.
|
|
||||||
|
|
||||||
**Context-free** (file ``foo.cf``). The form of rules is e.g.
|
|
||||||
```
|
|
||||||
Fun. S ::= NP "is" AP ;
|
|
||||||
```
|
|
||||||
If ``Fun`` is omitted, it is generated automatically.
|
|
||||||
Rules must be one per line. The RHS can be empty.
|
|
||||||
|
|
||||||
**Extended BNF** (file ``foo.ebnf``). The form of rules is e.g.
|
|
||||||
```
|
|
||||||
S ::= (NP+ ("is" | "was") AP | V NP*) ;
|
|
||||||
```
|
|
||||||
where the RHS is a regular expression of categories
|
|
||||||
and quoted tokens: ``"foo", CAT, T U, T|U, T*, T+, T?``, or empty.
|
|
||||||
Rule labels are generated automatically.
|
|
||||||
|
|
||||||
|
|
||||||
**Probabilistic grammars** (not a separate format).
|
|
||||||
You can set the probability of a function ``f`` (in its value category) by
|
|
||||||
```
|
|
||||||
--# prob f 0.009
|
|
||||||
```
|
|
||||||
These are put into a file given to GF using the ``probs=File`` flag
|
|
||||||
on command line. This file can be the grammar file itself.
|
|
||||||
|
|
||||||
**Example-based grammars** (file ``foo.gfe``). Expressions of the form
|
|
||||||
```
|
|
||||||
in Cat "example string"
|
|
||||||
```
|
|
||||||
are preprocessed by using a parser given by the flag
|
|
||||||
```
|
|
||||||
--# -resource=File
|
|
||||||
```
|
|
||||||
and the result is written to ``foo.gf``.
|
|
||||||
|
|
||||||
|
|
||||||
===References===
|
|
||||||
|
|
||||||
[GF Homepage http://www.grammaticalframework.org/]
|
|
||||||
|
|
||||||
A. Ranta, Grammatical Framework: A Type-Theoretical Grammar Formalism.
|
|
||||||
//The Journal of Functional Programming//, vol. 14:2. 2004, pp. 145-189.
|
|
||||||
|
|
||||||
4605
doc/gf-refman.html
4605
doc/gf-refman.html
File diff suppressed because it is too large
Load Diff
2770
doc/gf-refman.md
Normal file
2770
doc/gf-refman.md
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,11 +1,8 @@
|
|||||||
The GF Software System
|
The GF Software System
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!options(html): --toc
|
%!options(html): --toc
|
||||||
%!options(html): --toc-level=4
|
%!options(html): --toc-level=4
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
%!postproc(html): "#VSPACE" "<hr>"
|
%!postproc(html): "#VSPACE" "<hr>"
|
||||||
%!postproc(html): "#NORMAL" ""
|
%!postproc(html): "#NORMAL" ""
|
||||||
%!postproc(html): "#TINY" ""
|
%!postproc(html): "#TINY" ""
|
||||||
@@ -107,5 +104,3 @@ To run GF from a //script//, redirection of standard input can be used:
|
|||||||
```
|
```
|
||||||
The file ``script.gfs`` should then contain a sequence of GF commands, one per line.
|
The file ``script.gfs`` should then contain a sequence of GF commands, one per line.
|
||||||
Unrecognized command lines are skipped without terminating GF.
|
Unrecognized command lines are skipped without terminating GF.
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,69 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<title>GF Documentation</title>
|
|
||||||
<link rel=stylesheet href="../css/style.css">
|
|
||||||
</head>
|
|
||||||
|
|
||||||
|
|
||||||
<body>
|
|
||||||
|
|
||||||
<div class=center>
|
|
||||||
<a href="../"><img src="Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<h1>Grammatical Framework Documents</h1>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
|
|
||||||
<b>Top-5 documents</b>:
|
|
||||||
|
|
||||||
<a href="gf-quickstart.html">Quick start instruction</a>.
|
|
||||||
|
|
||||||
|
|
||||||
<a href="tutorial/gf-tutorial.html">Old Tutorial</a>, application-oriented.
|
|
||||||
|
|
||||||
<a href="gf-lrec-2010.pdf">New Tutorial</a>, linguistics-oriented.
|
|
||||||
|
|
||||||
<a href="gf-refman.html">ReferenceManual</a>.
|
|
||||||
|
|
||||||
<a href="../lib/resource/doc/synopsis.html">LibrarySynopsis</a>.
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
<h2>Language and system documentation</h2>
|
|
||||||
|
|
||||||
<ul>
|
|
||||||
|
|
||||||
<li>
|
|
||||||
<a href="gf-reference.html">GF Quick Reference</a>. Also available in
|
|
||||||
<a href="gf-reference.pdf">pdf</a>. Covers all features of GF language
|
|
||||||
in a summary format.
|
|
||||||
|
|
||||||
<li>
|
|
||||||
<a href="gf-refman.html">GF Reference Manual</a>. A full-scale reference
|
|
||||||
manual of the GF language.
|
|
||||||
|
|
||||||
<li>
|
|
||||||
<a href="gf-shell-reference.html">GF Shell Reference</a>.
|
|
||||||
Describes the commands available in the interactive GF shell. Also
|
|
||||||
summarizes how to run GF as a batch compiler.
|
|
||||||
|
|
||||||
<li>
|
|
||||||
<a href="gf-editor-modes.html">Editor modes for GF</a>.
|
|
||||||
Editor modes for GF provide syntax highlighting, automatic indentation and
|
|
||||||
other features that make editing GF grammar files easier.
|
|
||||||
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
|
|
||||||
<h2>Publications</h2>
|
|
||||||
|
|
||||||
<a href="gf-bibliography.html">
|
|
||||||
Bibliography</a>: more publications on GF, as well as background literature.
|
|
||||||
|
|
||||||
|
|
||||||
</body></html>
|
|
||||||
13
doc/index.md
Normal file
13
doc/index.md
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
---
|
||||||
|
title: Grammatical Framework Documentation
|
||||||
|
---
|
||||||
|
|
||||||
|
Perhaps you're looking for one of the following:
|
||||||
|
|
||||||
|
- [Tutorial](tutorial/gf-tutorial.html). This is a hands-on introduction to grammar writing in GF.
|
||||||
|
- [Reference Manual](gf-refman.html). A full-scale reference manual of the GF language.
|
||||||
|
- [RGL Tutorial](../lib/doc/rgl-tutorial/index.html)
|
||||||
|
- [RGL Synopsis](../lib/doc/synopsis/index.html). Documentation of the Resource Grammar Library, including the syntax API and lexical paradigms for each language.
|
||||||
|
- [Shell Reference](gf-shell-reference.html). Describes the commands available in the interactive GF shell.
|
||||||
|
Also summarizes how to run GF as a batch compiler.
|
||||||
|
- [Developers Guide](gf-developers.html). Detailed information about building and developing GF.
|
||||||
@@ -1,29 +1,26 @@
|
|||||||
<html>
|
<!DOCTYPE html>
|
||||||
|
<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
|
||||||
<head>
|
<head>
|
||||||
|
<title>C Runtime API</title>
|
||||||
|
<meta charset="utf-8" />
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
|
||||||
|
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
|
||||||
<style>
|
<style>
|
||||||
body { background: #eee; padding-top: 200px; }
|
pre {
|
||||||
|
background-color:#eee;
|
||||||
pre.python {background-color:#ffc; display: none}
|
margin-top: 1em;
|
||||||
pre.haskell {background-color:#ffc; display: block}
|
padding: 0.5em 1em;
|
||||||
pre.java {background-color:#ffc; display: none}
|
}
|
||||||
pre.csharp {background-color:#ffc; display: none}
|
pre.python {display: none}
|
||||||
|
pre.haskell {display: block}
|
||||||
|
pre.java {display: none}
|
||||||
|
pre.csharp {display: none}
|
||||||
span.python {display: none}
|
span.python {display: none}
|
||||||
span.haskell {display: inline}
|
span.haskell {display: inline}
|
||||||
span.java {display: none}
|
span.java {display: none}
|
||||||
span.csharp {display: none}
|
span.csharp {display: none}
|
||||||
|
|
||||||
.header {
|
|
||||||
position: fixed;
|
|
||||||
top: 0;
|
|
||||||
left: 0;
|
|
||||||
background: #ddd;
|
|
||||||
width: 100%;
|
|
||||||
padding: 5pt;
|
|
||||||
border-bottom: solid #bbb 2pt;
|
|
||||||
}
|
|
||||||
</style>
|
</style>
|
||||||
|
|
||||||
|
|
||||||
<script lang="javascript">
|
<script lang="javascript">
|
||||||
function change_language(href) {
|
function change_language(href) {
|
||||||
var name = href.split("#")[1];
|
var name = href.split("#")[1];
|
||||||
@@ -50,13 +47,27 @@
|
|||||||
</script>
|
</script>
|
||||||
</head>
|
</head>
|
||||||
<body onload="change_language(window.location.href); window.addEventListener('hashchange', function(e){change_language(window.location.href);});">
|
<body onload="change_language(window.location.href); window.addEventListener('hashchange', function(e){change_language(window.location.href);});">
|
||||||
<span class="header">
|
<div class="container-fluid" style="max-width: 1200px">
|
||||||
<h1>Using the <span class="python">Python</span> <span class="haskell">Haskell</span> <span class="java">Java</span> <span class="csharp">C#</span> binding to the C runtime</h1>
|
<div class="header sticky-top border-bottom py-3 bg-white">
|
||||||
|
<a href=".." title="Home">
|
||||||
Choose a language: <a href="#haskell">Haskell</a> <a href="#python">Python</a> <a href="#java">Java</a> <a href="#csharp">C#</a>
|
<img src="../doc/Logos/gf1.svg" height="120px" class="float-md-right ml-3 mb-3 bg-white" alt="GF Logo">
|
||||||
</span>
|
</a>
|
||||||
|
<h1>
|
||||||
<h4>Krasimir Angelov, July 2015 - August 2017</h4>
|
Using the
|
||||||
|
<span class="python">Python</span>
|
||||||
|
<span class="haskell">Haskell</span>
|
||||||
|
<span class="java">Java</span>
|
||||||
|
<span class="csharp">C#</span>
|
||||||
|
binding to the C runtime
|
||||||
|
</h1>
|
||||||
|
<h4 class="text-muted">Krasimir Angelov, July 2015 - August 2017</h4>
|
||||||
|
Choose a language:
|
||||||
|
<a href="#haskell" class="mx-1">Haskell</a>
|
||||||
|
<a href="#python" class="mx-1">Python</a>
|
||||||
|
<a href="#java" class="mx-1">Java</a>
|
||||||
|
<a href="#csharp" class="mx-1">C#</a>
|
||||||
|
</div>
|
||||||
|
<main class="py-4">
|
||||||
|
|
||||||
<h2>Loading the Grammar</h2>
|
<h2>Loading the Grammar</h2>
|
||||||
|
|
||||||
@@ -1289,6 +1300,7 @@ graph {
|
|||||||
}
|
}
|
||||||
</pre>
|
</pre>
|
||||||
|
|
||||||
|
</main>
|
||||||
|
</div>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ December 2010 for GF 3.2
|
|||||||
% txt2tags --toc -ttex gf-tutorial.txt
|
% txt2tags --toc -ttex gf-tutorial.txt
|
||||||
|
|
||||||
%!target:html
|
%!target:html
|
||||||
%!encoding: iso-8859-1
|
%!encoding: utf-8
|
||||||
%!options: --toc
|
%!options: --toc
|
||||||
|
|
||||||
%!postproc(tex) : "\\subsection\*" "\\newslide"
|
%!postproc(tex) : "\\subsection\*" "\\newslide"
|
||||||
@@ -618,32 +618,32 @@ and **semantic definitions**.
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
#NEW
|
% #NEW
|
||||||
|
%
|
||||||
==Slides==
|
% ==Slides==
|
||||||
|
%
|
||||||
You can chop this tutorial into a set of slides by the command
|
% You can chop this tutorial into a set of slides by the command
|
||||||
```
|
% ```
|
||||||
htmls gf-tutorial.html
|
% htmls gf-tutorial.html
|
||||||
```
|
% ```
|
||||||
where the program ``htmls`` is distributed with GF (see below), in
|
% where the program ``htmls`` is distributed with GF (see below), in
|
||||||
|
%
|
||||||
[``GF/src/tools/Htmls.hs`` http://grammaticalframework.org/src/tools/Htmls.hs]
|
% [``GF/src/tools/Htmls.hs`` http://grammaticalframework.org/src/tools/Htmls.hs]
|
||||||
|
%
|
||||||
The slides will appear as a set of files beginning with ``01-gf-tutorial.htmls``.
|
% The slides will appear as a set of files beginning with ``01-gf-tutorial.htmls``.
|
||||||
|
%
|
||||||
Internal links will not work in the slide format, except for those in the
|
% Internal links will not work in the slide format, except for those in the
|
||||||
upper left corner of each slide, and the links behind the "Contents" link.
|
% upper left corner of each slide, and the links behind the "Contents" link.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchaptwo
|
||||||
|
|
||||||
=Lesson 1: Getting Started with GF=
|
=Lesson 1: Getting Started with GF=
|
||||||
|
|
||||||
|
|
||||||
#Lchaptwo
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- install and run GF
|
- install and run GF
|
||||||
- write the first GF grammar: a "Hello World" grammar in three languages
|
- write the first GF grammar: a "Hello World" grammar in three languages
|
||||||
@@ -836,8 +836,8 @@ Finnish and an Italian concrete syntaxes:
|
|||||||
lin
|
lin
|
||||||
Hello recip = {s = "terve" ++ recip.s} ;
|
Hello recip = {s = "terve" ++ recip.s} ;
|
||||||
World = {s = "maailma"} ;
|
World = {s = "maailma"} ;
|
||||||
Mum = {s = "äiti"} ;
|
Mum = {s = "äiti"} ;
|
||||||
Friends = {s = "ystävät"} ;
|
Friends = {s = "ystävät"} ;
|
||||||
}
|
}
|
||||||
|
|
||||||
concrete HelloIta of Hello = {
|
concrete HelloIta of Hello = {
|
||||||
@@ -925,7 +925,7 @@ Default of the language flag (``-lang``): the last-imported concrete syntax.
|
|||||||
**Multilingual generation**:
|
**Multilingual generation**:
|
||||||
```
|
```
|
||||||
> parse -lang=HelloEng "hello friends" | linearize
|
> parse -lang=HelloEng "hello friends" | linearize
|
||||||
terve ystävät
|
terve ystävät
|
||||||
ciao amici
|
ciao amici
|
||||||
hello friends
|
hello friends
|
||||||
```
|
```
|
||||||
@@ -1037,9 +1037,10 @@ Application programs, using techniques from #Rchapeight:
|
|||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchapthree
|
||||||
|
|
||||||
=Lesson 2: Designing a grammar for complex phrases=
|
=Lesson 2: Designing a grammar for complex phrases=
|
||||||
|
|
||||||
#Lchapthree
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- build a larger grammar: phrases about food in English and Italian
|
- build a larger grammar: phrases about food in English and Italian
|
||||||
@@ -1335,7 +1336,7 @@ Just (?) replace English words with their dictionary equivalents:
|
|||||||
Phrase, Item, Kind, Quality = {s : Str} ;
|
Phrase, Item, Kind, Quality = {s : Str} ;
|
||||||
|
|
||||||
lin
|
lin
|
||||||
Is item quality = {s = item.s ++ "č" ++ quality.s} ;
|
Is item quality = {s = item.s ++ "è" ++ quality.s} ;
|
||||||
This kind = {s = "questo" ++ kind.s} ;
|
This kind = {s = "questo" ++ kind.s} ;
|
||||||
That kind = {s = "quel" ++ kind.s} ;
|
That kind = {s = "quel" ++ kind.s} ;
|
||||||
QKind quality kind = {s = kind.s ++ quality.s} ;
|
QKind quality kind = {s = kind.s ++ quality.s} ;
|
||||||
@@ -1446,11 +1447,11 @@ linearizations in different languages:
|
|||||||
> gr -number=2 | l -treebank
|
> gr -number=2 | l -treebank
|
||||||
|
|
||||||
Is (That Cheese) (Very Boring)
|
Is (That Cheese) (Very Boring)
|
||||||
quel formaggio č molto noioso
|
quel formaggio è molto noioso
|
||||||
that cheese is very boring
|
that cheese is very boring
|
||||||
|
|
||||||
Is (That Cheese) Fresh
|
Is (That Cheese) Fresh
|
||||||
quel formaggio č fresco
|
quel formaggio è fresco
|
||||||
that cheese is fresh
|
that cheese is fresh
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -1472,14 +1473,14 @@ answer given in another language.
|
|||||||
You can interrupt the quiz by entering a line consisting of a dot ('.').
|
You can interrupt the quiz by entering a line consisting of a dot ('.').
|
||||||
|
|
||||||
this fish is warm
|
this fish is warm
|
||||||
questo pesce č caldo
|
questo pesce è caldo
|
||||||
> Yes.
|
> Yes.
|
||||||
Score 1/1
|
Score 1/1
|
||||||
|
|
||||||
this cheese is Italian
|
this cheese is Italian
|
||||||
questo formaggio č noioso
|
questo formaggio è noioso
|
||||||
> No, not questo formaggio č noioso, but
|
> No, not questo formaggio è noioso, but
|
||||||
questo formaggio č italiano
|
questo formaggio è italiano
|
||||||
|
|
||||||
Score 1/2
|
Score 1/2
|
||||||
this fish is expensive
|
this fish is expensive
|
||||||
@@ -1756,7 +1757,7 @@ Simultaneous extension and opening:
|
|||||||
lincat
|
lincat
|
||||||
Question = SS ;
|
Question = SS ;
|
||||||
lin
|
lin
|
||||||
QIs item quality = ss (item.s ++ "č" ++ quality.s) ;
|
QIs item quality = ss (item.s ++ "è" ++ quality.s) ;
|
||||||
Pizza = ss "pizza" ;
|
Pizza = ss "pizza" ;
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@@ -1797,9 +1798,10 @@ where
|
|||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchapfour
|
||||||
|
|
||||||
=Lesson 3: Grammars with parameters=
|
=Lesson 3: Grammars with parameters=
|
||||||
|
|
||||||
#Lchapfour
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- implement sophisticated linguistic structures:
|
- implement sophisticated linguistic structures:
|
||||||
@@ -2364,10 +2366,10 @@ in English, with special care taken of variations with the suffix
|
|||||||
|
|
||||||
+ Implement the German **Umlaut** operation on word stems.
|
+ Implement the German **Umlaut** operation on word stems.
|
||||||
The operation changes the vowel of the stressed stem syllable as follows:
|
The operation changes the vowel of the stressed stem syllable as follows:
|
||||||
//a// to //ä//, //au// to //äu//, //o// to //ö//, and //u// to //ü//. You
|
//a// to //ä//, //au// to //äu//, //o// to //ö//, and //u// to //ü//. You
|
||||||
can assume that the operation only takes syllables as arguments. Test the
|
can assume that the operation only takes syllables as arguments. Test the
|
||||||
operation to see whether it correctly changes //Arzt// to //Ärzt//,
|
operation to see whether it correctly changes //Arzt// to //Ärzt//,
|
||||||
//Baum// to //Bäum//, //Topf// to //Töpf//, and //Kuh// to //Küh//.
|
//Baum// to //Bäum//, //Topf// to //Töpf//, and //Kuh// to //Küh//.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -2480,10 +2482,10 @@ The command ``morpho_quiz = mq`` generates inflection exercises.
|
|||||||
Welcome to GF Morphology Quiz.
|
Welcome to GF Morphology Quiz.
|
||||||
...
|
...
|
||||||
|
|
||||||
réapparaître : VFin VCondit Pl P2
|
réapparaître : VFin VCondit Pl P2
|
||||||
réapparaitriez
|
réapparaitriez
|
||||||
> No, not réapparaitriez, but
|
> No, not réapparaitriez, but
|
||||||
réapparaîtriez
|
réapparaîtriez
|
||||||
Score 0/1
|
Score 0/1
|
||||||
```
|
```
|
||||||
To create a list for later use, use the command ``morpho_list = ml``
|
To create a list for later use, use the command ``morpho_list = ml``
|
||||||
@@ -2563,7 +2565,7 @@ We need only number variation for the copula.
|
|||||||
```
|
```
|
||||||
copula : Number -> Str =
|
copula : Number -> Str =
|
||||||
\n -> case n of {
|
\n -> case n of {
|
||||||
Sg => "č" ;
|
Sg => "è" ;
|
||||||
Pl => "sono"
|
Pl => "sono"
|
||||||
} ;
|
} ;
|
||||||
```
|
```
|
||||||
@@ -2772,9 +2774,10 @@ Thus
|
|||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchapfive
|
||||||
|
|
||||||
=Lesson 4: Using the resource grammar library=
|
=Lesson 4: Using the resource grammar library=
|
||||||
|
|
||||||
#Lchapfive
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- navigate in the GF resource grammar library and use it in applications
|
- navigate in the GF resource grammar library and use it in applications
|
||||||
@@ -3305,13 +3308,13 @@ we can write a **functor instantiation**,
|
|||||||
oper
|
oper
|
||||||
wine_N = mkN "Wein" ;
|
wine_N = mkN "Wein" ;
|
||||||
pizza_N = mkN "Pizza" "Pizzen" feminine ;
|
pizza_N = mkN "Pizza" "Pizzen" feminine ;
|
||||||
cheese_N = mkN "Käse" "Käsen" masculine ;
|
cheese_N = mkN "Käse" "Käsen" masculine ;
|
||||||
fish_N = mkN "Fisch" ;
|
fish_N = mkN "Fisch" ;
|
||||||
fresh_A = mkA "frisch" ;
|
fresh_A = mkA "frisch" ;
|
||||||
warm_A = mkA "warm" "wärmer" "wärmste" ;
|
warm_A = mkA "warm" "wärmer" "wärmste" ;
|
||||||
italian_A = mkA "italienisch" ;
|
italian_A = mkA "italienisch" ;
|
||||||
expensive_A = mkA "teuer" ;
|
expensive_A = mkA "teuer" ;
|
||||||
delicious_A = mkA "köstlich" ;
|
delicious_A = mkA "köstlich" ;
|
||||||
boring_A = mkA "langweilig" ;
|
boring_A = mkA "langweilig" ;
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@@ -3362,11 +3365,11 @@ Lexicon instance
|
|||||||
cheese_N = mkN "juusto" ;
|
cheese_N = mkN "juusto" ;
|
||||||
fish_N = mkN "kala" ;
|
fish_N = mkN "kala" ;
|
||||||
fresh_A = mkA "tuore" ;
|
fresh_A = mkA "tuore" ;
|
||||||
warm_A = mkA "lämmin" ;
|
warm_A = mkA "lämmin" ;
|
||||||
italian_A = mkA "italialainen" ;
|
italian_A = mkA "italialainen" ;
|
||||||
expensive_A = mkA "kallis" ;
|
expensive_A = mkA "kallis" ;
|
||||||
delicious_A = mkA "herkullinen" ;
|
delicious_A = mkA "herkullinen" ;
|
||||||
boring_A = mkA "tylsä" ;
|
boring_A = mkA "tylsä" ;
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
Functor instantiation
|
Functor instantiation
|
||||||
@@ -3614,9 +3617,10 @@ tenses and moods, e.g. the Romance languages.
|
|||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchapsix
|
||||||
|
|
||||||
=Lesson 5: Refining semantics in abstract syntax=
|
=Lesson 5: Refining semantics in abstract syntax=
|
||||||
|
|
||||||
#Lchapsix
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- include semantic conditions in grammars, by using
|
- include semantic conditions in grammars, by using
|
||||||
@@ -3626,7 +3630,7 @@ Goals:
|
|||||||
- semantic definitions
|
- semantic definitions
|
||||||
|
|
||||||
These concepts are inherited from **type theory** (more precisely:
|
These concepts are inherited from **type theory** (more precisely:
|
||||||
constructive type theory, or Martin-Löf type theory).
|
constructive type theory, or Martin-Löf type theory).
|
||||||
|
|
||||||
Type theory is the basis **logical frameworks**.
|
Type theory is the basis **logical frameworks**.
|
||||||
|
|
||||||
@@ -4177,11 +4181,11 @@ Type checking can be invoked with ``put_term -transform=solve``.
|
|||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchapseven
|
||||||
|
|
||||||
==Lesson 6: Grammars of formal languages==
|
==Lesson 6: Grammars of formal languages==
|
||||||
|
|
||||||
|
|
||||||
#Lchapseven
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- write grammars for formal languages (mathematical notation, programming languages)
|
- write grammars for formal languages (mathematical notation, programming languages)
|
||||||
- interface between formal and natural langauges
|
- interface between formal and natural langauges
|
||||||
@@ -4516,9 +4520,10 @@ point literals as arguments.
|
|||||||
|
|
||||||
#NEW
|
#NEW
|
||||||
|
|
||||||
|
#Lchapeight
|
||||||
|
|
||||||
=Lesson 7: Embedded grammars=
|
=Lesson 7: Embedded grammars=
|
||||||
|
|
||||||
#Lchapeight
|
|
||||||
|
|
||||||
Goals:
|
Goals:
|
||||||
- use grammars as parts of programs written in Haskell and JavaScript
|
- use grammars as parts of programs written in Haskell and JavaScript
|
||||||
@@ -4639,7 +4644,7 @@ output. Therefore it can be a part of a pipe and read and write files.
|
|||||||
The simplest way to translate is to ``echo`` input to the program:
|
The simplest way to translate is to ``echo`` input to the program:
|
||||||
```
|
```
|
||||||
% echo "this wine is delicious" | ./trans Food.pgf
|
% echo "this wine is delicious" | ./trans Food.pgf
|
||||||
questo vino č delizioso
|
questo vino è delizioso
|
||||||
```
|
```
|
||||||
The result is given in all languages except the input language.
|
The result is given in all languages except the input language.
|
||||||
|
|
||||||
@@ -4958,12 +4963,12 @@ syntax name. This file contains the multilingual grammar as a JavaScript object.
|
|||||||
===Using the JavaScript grammar===
|
===Using the JavaScript grammar===
|
||||||
|
|
||||||
To perform parsing and linearization, the run-time library
|
To perform parsing and linearization, the run-time library
|
||||||
``gflib.js`` is used. It is included in ``GF/lib/javascript/``, together with
|
``gflib.js`` is used. It is included in ``/src/runtime/javascript/``, together with
|
||||||
some other JavaScript and HTML files; these files can be used
|
some other JavaScript and HTML files; these files can be used
|
||||||
as templates for building applications.
|
as templates for building applications.
|
||||||
|
|
||||||
An example of usage is
|
An example of usage is
|
||||||
[``translator.html`` http://grammaticalframework.org:41296],
|
[``translator.html`` ../../src/runtime/javascript/translator.html],
|
||||||
which is in fact initialized with
|
which is in fact initialized with
|
||||||
a pointer to the Food grammar, so that it provides translation between the English
|
a pointer to the Food grammar, so that it provides translation between the English
|
||||||
and Italian grammars:
|
and Italian grammars:
|
||||||
|
|||||||
@@ -1,13 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
FILES="darcs.txt transfer-reference.txt transfer-tutorial.txt \
|
|
||||||
transfer.txt"
|
|
||||||
|
|
||||||
for f in $FILES; do
|
|
||||||
h=`basename "$f" ".txt"`.html
|
|
||||||
if [ "$f" -nt "$h" ]; then
|
|
||||||
txt2tags $f
|
|
||||||
else
|
|
||||||
echo "$h is newer than $f, skipping"
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
@@ -1,10 +1,6 @@
|
|||||||
GF character encoding changes
|
GF character encoding changes
|
||||||
Thomas Hallgren
|
Thomas Hallgren
|
||||||
%%mtime(%F)
|
2013-12-18
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Changes to character encodings in GF grammar files ==
|
==Changes to character encodings in GF grammar files ==
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TD><B> <TH>
|
|
||||||
%!postproc(html): </B></TD> </TH>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
**GF 3.2.9** source-only snapshot was released on 12 September 2011.
|
**GF 3.2.9** source-only snapshot was released on 12 September 2011.
|
||||||
|
|
||||||
What's new? Faster grammar compilation!
|
What's new? Faster grammar compilation!
|
||||||
@@ -77,9 +72,3 @@ The above notes for installing from source apply also in this case.
|
|||||||
- [GF 3.2 index-3.2.html] (December 2011).
|
- [GF 3.2 index-3.2.html] (December 2011).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TD><B> <TH>
|
|
||||||
%!postproc(html): </B></TD> </TH>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
**GF 3.2** was released on 23 December 2010.
|
**GF 3.2** was released on 23 December 2010.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.2.html].
|
What's new? See the [Release notes release-3.2.html].
|
||||||
@@ -105,8 +100,3 @@ Subsequently:
|
|||||||
```
|
```
|
||||||
|
|
||||||
The above notes for installing from source apply also in this case.
|
The above notes for installing from source apply also in this case.
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <TD><B> <TH>
|
|
||||||
%!postproc(html): </B></TD> </TH>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
**GF 3.3.3** was released on 3 March 2012.
|
**GF 3.3.3** was released on 3 March 2012.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.3.3.html].
|
What's new? See the [Release notes release-3.3.3.html].
|
||||||
@@ -127,9 +121,3 @@ For more info, see the [GF Developers Guide ../doc/gf-developers.html].
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,11 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TD><B> <TH>
|
|
||||||
%!postproc(html): </B></TD> </TH>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
**GF 3.3** was released on 27 October 2011.
|
**GF 3.3** was released on 27 October 2011.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.3.html].
|
What's new? See the [Release notes release-3.3.html].
|
||||||
@@ -115,9 +110,3 @@ The above notes for installing from source apply also in this case.
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,13 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): </HEAD> <STYLE>body { color: #333; } li>code,p>code,pre { color: #730; }</STYLE></HEAD>
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <TD><B> <TH>
|
|
||||||
%!postproc(html): </B></TD> </TH>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
**GF 3.4** was released on 31 January 2013.
|
**GF 3.4** was released on 31 January 2013.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.4.html].
|
What's new? See the [Release notes release-3.4.html].
|
||||||
@@ -20,10 +13,7 @@ What's new? See the [Release notes release-3.4.html].
|
|||||||
| Fedora (32-bit) | [Fedora RPMs /~hallgren/tmp/Fedora/] | ``sudo rpm -i ...``
|
| Fedora (32-bit) | [Fedora RPMs /~hallgren/tmp/Fedora/] | ``sudo rpm -i ...``
|
||||||
| Ubuntu (32-bit) | [gf_3.4-1_i386.deb gf_3.4-1_i386.deb] | ``sudo dpkg -i gf_3.4-1_i386.deb``
|
| Ubuntu (32-bit) | [gf_3.4-1_i386.deb gf_3.4-1_i386.deb] | ``sudo dpkg -i gf_3.4-1_i386.deb``
|
||||||
| Ubuntu (64-bit) | [gf_3.4-1_amd64.deb gf_3.4-1_amd64.deb] | ``sudo dpkg -i gf_3.4-1_amd64.deb``
|
| Ubuntu (64-bit) | [gf_3.4-1_amd64.deb gf_3.4-1_amd64.deb] | ``sudo dpkg -i gf_3.4-1_amd64.deb``
|
||||||
| Windows | [gf-3.4-bin-windows.zip gf-3.4-bin-windows.zip] |
|
| Windows | [gf-3.4-bin-windows.zip gf-3.4-bin-windows.zip] | -
|
||||||
%| ... | ... | ...
|
|
||||||
|
|
||||||
%More binary packages might be added later.
|
|
||||||
|
|
||||||
===Notes===
|
===Notes===
|
||||||
|
|
||||||
@@ -153,8 +143,3 @@ For more info on working with the GF source code, see the
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,13 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): </HEAD> <STYLE>body { color: #333; } li>code,p>code,pre { color: #730; }</STYLE></HEAD>
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <TD><B> <TH>
|
|
||||||
%!postproc(html): </B></TD> </TH>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
**GF 3.5** was released on 6 August 2013.
|
**GF 3.5** was released on 6 August 2013.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.5.html].
|
What's new? See the [Release notes release-3.5.html].
|
||||||
@@ -157,8 +150,3 @@ For more info on working with the GF source code, see the
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,13 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): </HEAD> <STYLE>body { color: #333; } li>code,p>code,pre { color: #730; }</STYLE></HEAD>
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
%!postproc(html): <TD><I> <TD><small>
|
|
||||||
%!postproc(html): </I></TD> </small></TD>
|
|
||||||
|
|
||||||
**GF 3.6** was released on 23 June 2014.
|
**GF 3.6** was released on 23 June 2014.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.6.html].
|
What's new? See the [Release notes release-3.6.html].
|
||||||
@@ -177,8 +170,3 @@ For more info on working with the GF source code, see the
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,12 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/notes.css
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
%!postproc(html): <TD><I> <TD><small>
|
|
||||||
%!postproc(html): </I></TD> </small></TD>
|
|
||||||
|
|
||||||
**GF 3.7.1** was released on 2 October 2015.
|
**GF 3.7.1** was released on 2 October 2015.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.7.1.html].
|
What's new? See the [Release notes release-3.7.1.html].
|
||||||
@@ -180,8 +174,3 @@ For more info on working with the GF source code, see the
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,13 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): </HEAD> <STYLE>body { color: #333; } li>code,p>code,pre { color: #730; }</STYLE></HEAD>
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
%!postproc(html): <TD><I> <TD><small>
|
|
||||||
%!postproc(html): </I></TD> </small></TD>
|
|
||||||
|
|
||||||
**GF 3.7** was released on 25 June 2015.
|
**GF 3.7** was released on 25 June 2015.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.7.html].
|
What's new? See the [Release notes release-3.7.html].
|
||||||
@@ -173,8 +166,3 @@ For more info on working with the GF source code, see the
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,12 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/notes.css
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
%!postproc(html): <TD><I> <TD><small>
|
|
||||||
%!postproc(html): </I></TD> </small></TD>
|
|
||||||
|
|
||||||
**GF 3.8** was released on 22 June 2016.
|
**GF 3.8** was released on 22 June 2016.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.8.html].
|
What's new? See the [Release notes release-3.8.html].
|
||||||
@@ -171,8 +165,3 @@ For more info on working with the GF source code, see the
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,12 +1,6 @@
|
|||||||
Grammatical Framework Download and Installation
|
Grammatical Framework Download and Installation
|
||||||
|
|
||||||
|
|
||||||
%!style:../css/notes.css
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
%!postproc(html): <TD><I> <TD><small>
|
|
||||||
%!postproc(html): </I></TD> </small></TD>
|
|
||||||
|
|
||||||
**GF 3.9** was released on 11 August 2017.
|
**GF 3.9** was released on 11 August 2017.
|
||||||
|
|
||||||
What's new? See the [Release notes release-3.9.html].
|
What's new? See the [Release notes release-3.9.html].
|
||||||
@@ -18,10 +12,11 @@ What's new? See the [Release notes release-3.9.html].
|
|||||||
| macOS | [gf-3.9.pkg gf-3.9.pkg] | //GF+S+C+J+P// | Double-click on the package icon
|
| macOS | [gf-3.9.pkg gf-3.9.pkg] | //GF+S+C+J+P// | Double-click on the package icon
|
||||||
| macOS | [gf-3.9-bin-intel-mac.tar.gz gf-3.9-bin-intel-mac.tar.gz] | //GF+S+C+J+P// | ``sudo tar -C /usr/local -zxf gf-3.9-bin-intel-mac.tar.gz``
|
| macOS | [gf-3.9-bin-intel-mac.tar.gz gf-3.9-bin-intel-mac.tar.gz] | //GF+S+C+J+P// | ``sudo tar -C /usr/local -zxf gf-3.9-bin-intel-mac.tar.gz``
|
||||||
%| Fedora (32-bit) | [Fedora RPMs /~hallgren/tmp/Fedora/] | //GF+S+C+J+P// | ``sudo rpm -i ...``
|
%| Fedora (32-bit) | [Fedora RPMs /~hallgren/tmp/Fedora/] | //GF+S+C+J+P// | ``sudo rpm -i ...``
|
||||||
| Raspian 9.1 | [gf_3.9-1_armhf.deb gf_3.9-1_armhf.deb] | //GF+S+C+J+P// | ``sudo dpkg -i gf_3.9-1_armhf.deb``
|
| Raspbian 9.1 | [gf_3.9-1_armhf.deb gf_3.9-1_armhf.deb] | //GF+S+C+J+P// | ``sudo dpkg -i gf_3.9-1_armhf.deb``
|
||||||
| Ubuntu (32-bit) | [gf_3.9-1_i386.deb gf_3.9-1_i386.deb] | //GF+S+C+J+P// | ``sudo dpkg -i gf_3.9-1_i386.deb``
|
| Ubuntu (32-bit) | [gf_3.9-1_i386.deb gf_3.9-1_i386.deb] | //GF+S+C+J+P// | ``sudo dpkg -i gf_3.9-1_i386.deb``
|
||||||
| Ubuntu (64-bit) | [gf_3.9-1_amd64.deb gf_3.9-1_amd64.deb] | //GF+S+C+J+P// | ``sudo dpkg -i gf_3.9-1_amd64.deb``
|
| Ubuntu (64-bit) | [gf_3.9-1_amd64.deb gf_3.9-1_amd64.deb] | //GF+S+C+J+P// | ``sudo dpkg -i gf_3.9-1_amd64.deb``
|
||||||
| Windows | [gf-3.9-bin-windows.zip gf-3.9-bin-windows.zip] | //GF+S// | ``unzip gf-3.9-bin-windows.zip``
|
| Windows | [gf-3.9-bin-windows.zip gf-3.9-bin-windows.zip] | //GF+S// | ``unzip gf-3.9-bin-windows.zip``
|
||||||
|
|
||||||
%| MINGW | [gf-3.9-bin-i686-MINGW32_NT-6.1.tar.gz gf-3.9-bin-i686-MINGW32_NT-6.1.tar.gz] | //GF+S+C// | ``tar -C / gf-3.9-bin-i686-MINGW32_NT-6.1.tar.gz``
|
%| MINGW | [gf-3.9-bin-i686-MINGW32_NT-6.1.tar.gz gf-3.9-bin-i686-MINGW32_NT-6.1.tar.gz] | //GF+S+C// | ``tar -C / gf-3.9-bin-i686-MINGW32_NT-6.1.tar.gz``
|
||||||
%| ... | ... | ... | ...
|
%| ... | ... | ... | ...
|
||||||
|
|
||||||
@@ -195,8 +190,3 @@ with ``stack install`` (assuming you already have Stack set up).
|
|||||||
- [GF 3.2 index-3.2.html] (December 2010).
|
- [GF 3.2 index-3.2.html] (December 2010).
|
||||||
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
- [GF 3.1.6 index-3.1.6.html] (April 2010).
|
||||||
- [GF 3.1 old-index.html] (December 2009).
|
- [GF 3.1 old-index.html] (December 2009).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
187
download/index.md
Normal file
187
download/index.md
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
---
|
||||||
|
title: Grammatical Framework Download and Installation
|
||||||
|
...
|
||||||
|
|
||||||
|
**GF 3.10** was released on 2 December 2018.
|
||||||
|
|
||||||
|
What's new? See the [release notes](release-3.10.html).
|
||||||
|
|
||||||
|
## Binary packages
|
||||||
|
|
||||||
|
These binary packages include both the GF core (compiler and runtime) as well as the pre-compiled RGL.
|
||||||
|
|
||||||
|
| Platform | Download | Features | How to install |
|
||||||
|
|:----------------|:---------------------------------------------------|:---------------|:-----------------------------------|
|
||||||
|
| macOS | [gf-3.10.pkg](gf-3.10.pkg) | GF, S, C, J, P | Double-click on the package icon |
|
||||||
|
| Ubuntu (32-bit) | [gf\_3.10-2\_i386.deb](gf_3.10-2_i386.deb) | GF, S, C, J, P | `sudo dpkg -i gf_3.10-2_i386.deb` |
|
||||||
|
| Ubuntu (64-bit) | [gf\_3.10-2\_amd64.deb](gf_3.10-2_amd64.deb) | GF, S, C, J, P | `sudo dpkg -i gf_3.10-2_amd64.deb` |
|
||||||
|
| Windows | [gf-3.10-bin-windows.zip](gf-3.10-bin-windows.zip) | GF, S | `unzip gf-3.10-bin-windows.zip` |
|
||||||
|
|
||||||
|
<!--
|
||||||
|
| macOS | [gf-3.10-bin-intel-mac.tar.gz](gf-3.10-bin-intel-mac.tar.gz) | GF,S,C,J,P | `sudo tar -C /usr/local -zxf gf-3.10-bin-intel-mac.tar.gz` |
|
||||||
|
| Raspbian 9.1 | [gf\_3.10-1\_armhf.deb](gf_3.10-1_armhf.deb) | GF,S,C,J,P | `sudo dpkg -i gf_3.10-1_armhf.deb` |
|
||||||
|
-->
|
||||||
|
|
||||||
|
**Features**
|
||||||
|
|
||||||
|
- GF = GF shell and grammar compiler
|
||||||
|
- S = `gf -server` mode
|
||||||
|
- C = C run-time system
|
||||||
|
- J/P = Java/Python binding to the C run-time system
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
The Windows package is installed by just unpacking it anywhere. You will
|
||||||
|
probably need to set the `PATH` and `GF_LIB_PATH` environment variables,
|
||||||
|
see Inari's notes on [Installing GF on Windows](http://www.grammaticalframework.org/~inari/gf-windows.html#toc3).
|
||||||
|
|
||||||
|
The Ubuntu `.deb` packages should work on Ubuntu 16.04 and 18.04 and
|
||||||
|
similar Linux distributions. The `.deb` packages were updated
|
||||||
|
to version 3.10-2 after the release of GF 3.10.
|
||||||
|
(Because of a packaging bug the Resource Grammar Library was missing
|
||||||
|
in the 3.10-1 packages.)
|
||||||
|
|
||||||
|
<!-- The Raspbian `.deb` package was created on a Raspberry Pi 3 and will
|
||||||
|
probably work on other ARM-based systems running Debian 9 (stretch) or
|
||||||
|
similar Linux distributions. -->
|
||||||
|
|
||||||
|
The packages for macOS (Mac OS X) should work on at least 10.13 and
|
||||||
|
10.14 (High Sierra and Mojave)
|
||||||
|
|
||||||
|
<!-- The Mac OS and Linux `.tar.gz` packages are designed to be installed in
|
||||||
|
`/usr/local`. You can install them in other locations, but then you need
|
||||||
|
to set the `GF_LIB_PATH` environment variable:
|
||||||
|
|
||||||
|
```
|
||||||
|
export GF_LIB_PATH=/usr/local/share/gf-3.10/lib
|
||||||
|
```
|
||||||
|
|
||||||
|
where `/usr/local` should be replaced with the path to the location
|
||||||
|
where you unpacked the package. -->
|
||||||
|
|
||||||
|
## Installing the latest release from source
|
||||||
|
|
||||||
|
[GF is on Hackage](http://hackage.haskell.org/package/gf), so under
|
||||||
|
normal circumstances the procedure is fairly simple:
|
||||||
|
|
||||||
|
1. Install a recent version of the [Haskell
|
||||||
|
Platform](http://hackage.haskell.org/platform) (see note below)
|
||||||
|
2. `cabal update`
|
||||||
|
3. On Linux: install some C libraries from your Linux distribution (see note below)
|
||||||
|
4. `cabal install gf`
|
||||||
|
|
||||||
|
This installs the GF executable and Haskell libraries, but **does not include the RGL**.
|
||||||
|
|
||||||
|
You can also download the source code release from [GitHub](https://github.com/GrammaticalFramework/gf-core/releases),
|
||||||
|
and follow the instructions below under **Installing from the latest developer source code**.
|
||||||
|
|
||||||
|
### Notes
|
||||||
|
|
||||||
|
**Installation location**
|
||||||
|
|
||||||
|
The above steps install GF for a single user. The executables are put
|
||||||
|
in `$HOME/.cabal/bin` (or, with recent versions of the Haskell platform
|
||||||
|
on Mac OS X, in `$HOME/Library/Haskell/bin`), so it is a good idea to
|
||||||
|
put a line in your `.bash_profile` or `.profile` to add that directory
|
||||||
|
to your path:
|
||||||
|
|
||||||
|
```
|
||||||
|
PATH=$HOME/.cabal/bin:$PATH
|
||||||
|
```
|
||||||
|
|
||||||
|
or
|
||||||
|
|
||||||
|
```
|
||||||
|
PATH=$HOME/Library/Haskell/bin:$PATH
|
||||||
|
```
|
||||||
|
|
||||||
|
**Build tools**
|
||||||
|
|
||||||
|
In order to compile GF you need the build tools **Alex** and **Happy**.
|
||||||
|
These can be installed via Cabal, e.g.:
|
||||||
|
|
||||||
|
```
|
||||||
|
cabal install alex happy
|
||||||
|
```
|
||||||
|
|
||||||
|
or obtained by other means, depending on your OS.
|
||||||
|
|
||||||
|
**Haskeline**
|
||||||
|
|
||||||
|
GF uses [`haskeline`](http://hackage.haskell.org/package/haskeline), which
|
||||||
|
on Linux depends on some non-Haskell libraries that won't be installed
|
||||||
|
automatically by cabal, and therefore need to be installed manually.
|
||||||
|
Here is one way to do this:
|
||||||
|
|
||||||
|
- On Ubuntu: `sudo apt-get install libghc-haskeline-dev`
|
||||||
|
- On Fedora: `sudo yum install ghc-haskeline-devel`
|
||||||
|
|
||||||
|
**GHC version**
|
||||||
|
|
||||||
|
The GF source code has been updated to compile with GHC 8.4.
|
||||||
|
Using older versions of GHC (e.g. 8.2, 8.0 and 7.10) should still work too.
|
||||||
|
|
||||||
|
## Installing from the latest developer source code
|
||||||
|
|
||||||
|
If you haven't already, clone the repository with:
|
||||||
|
|
||||||
|
```
|
||||||
|
git clone https://github.com/GrammaticalFramework/gf-core.git
|
||||||
|
```
|
||||||
|
|
||||||
|
If you've already cloned the repository previously, update with:
|
||||||
|
|
||||||
|
```
|
||||||
|
git pull
|
||||||
|
```
|
||||||
|
|
||||||
|
Then install with:
|
||||||
|
|
||||||
|
```
|
||||||
|
cabal install
|
||||||
|
```
|
||||||
|
|
||||||
|
or, if you're a Stack user:
|
||||||
|
|
||||||
|
```
|
||||||
|
stack install
|
||||||
|
```
|
||||||
|
|
||||||
|
The above notes for installing from source apply also in these cases.
|
||||||
|
For more info on working with the GF source code, see the
|
||||||
|
[GF Developers Guide](../doc/gf-developers.html).
|
||||||
|
|
||||||
|
## Installing the RGL from source
|
||||||
|
|
||||||
|
To install the RGL from source,
|
||||||
|
you can download a release from [GitHub](https://github.com/GrammaticalFramework/gf-rgl/releases)
|
||||||
|
or get the latest version by cloning the repository:
|
||||||
|
|
||||||
|
```
|
||||||
|
git clone https://github.com/GrammaticalFramework/gf-rgl.git
|
||||||
|
```
|
||||||
|
|
||||||
|
In both cases, once you have the RGL sources you can install them by running:
|
||||||
|
|
||||||
|
```
|
||||||
|
make
|
||||||
|
```
|
||||||
|
|
||||||
|
in the RGL folder.
|
||||||
|
This assumes that you already have GF installed.
|
||||||
|
For more details about building the RGL, see the [RGL README](https://github.com/GrammaticalFramework/gf-rgl/blob/master/README.md).
|
||||||
|
|
||||||
|
## Older releases
|
||||||
|
|
||||||
|
- [GF 3.9](index-3.9.html) (August 2017)
|
||||||
|
- [GF 3.8](index-3.8.html) (June 2016)
|
||||||
|
- [GF 3.7.1](index-3.7.1.html) (October 2015)
|
||||||
|
- [GF 3.7](index-3.7.html) (June 2015)
|
||||||
|
- [GF 3.6](index-3.6.html) (June 2014)
|
||||||
|
- [GF 3.5](index-3.5.html) (August 2013)
|
||||||
|
- [GF 3.4](index-3.4.html) (January 2013)
|
||||||
|
- [GF 3.3.3](index-3.3.3.html) (March 2012)
|
||||||
|
- [GF 3.3](index-3.3.html) (October 2011)
|
||||||
|
- [GF 3.2.9](index-3.2.9.html) source-only snapshot (September 2011)
|
||||||
|
- [GF 3.2](index-3.2.html) (December 2010)
|
||||||
|
- [GF 3.1.6](index-3.1.6.html) (April 2010)
|
||||||
@@ -57,8 +57,3 @@ Internal
|
|||||||
|
|
||||||
Javascript generation is not updated to the new PGF format.
|
Javascript generation is not updated to the new PGF format.
|
||||||
[GF 3.1 old-index.html] should still be used for building Javascript applications.
|
[GF 3.1 old-index.html] should still be used for building Javascript applications.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
66
download/release-3.10.md
Normal file
66
download/release-3.10.md
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
title: GF 3.10 Release Notes
|
||||||
|
date: 2 December 2018
|
||||||
|
...
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
See the [download page](index.html).
|
||||||
|
|
||||||
|
## What's new
|
||||||
|
|
||||||
|
In this release, the GF "core" (compiler and runtimes) and RGL have been split into separate repositories.
|
||||||
|
The binary packages on the downloads page contain both GF and the RGL, but the sources are now separate:
|
||||||
|
[gf-core](https://github.com/GrammaticalFramework/gf-core) and
|
||||||
|
[gf-rgl](https://github.com/GrammaticalFramework/gf-rgl).
|
||||||
|
|
||||||
|
Over 300 changes have been pushed to GF and over 600 changes have been made to the RGL
|
||||||
|
since the release of GF 3.9 in August 2017.
|
||||||
|
|
||||||
|
## General
|
||||||
|
|
||||||
|
- Travis integration:
|
||||||
|
GF [](https://travis-ci.org/GrammaticalFramework/gf-core) and
|
||||||
|
RGL [](https://travis-ci.org/GrammaticalFramework/gf-rgl)
|
||||||
|
- A lot of bug fixes and repository cleanup, including things moved to new repositories:
|
||||||
|
- [Phrasebook](https://github.com/GrammaticalFramework/gf-contrib/tree/master/phrasebook)
|
||||||
|
- [Wide coverage translator](https://github.com/GrammaticalFramework/wide-coverage)
|
||||||
|
- [Mobile apps](https://github.com/GrammaticalFramework/gf-offline-translator)
|
||||||
|
- [gftest](https://github.com/GrammaticalFramework/gftest)
|
||||||
|
- [gf-mode](https://github.com/GrammaticalFramework/gf-emacs-mode) for Emacs
|
||||||
|
- [RGL browser](https://github.com/GrammaticalFramework/rgl-source-browser) (live [here](http://www.grammaticalframework.org/~john/rgl-browser/))
|
||||||
|
- A fresh look for the GF website.
|
||||||
|
|
||||||
|
## GF compiler and run-time library
|
||||||
|
|
||||||
|
- Extensive improvements in the C runtime and bindings to it from Python, Java, Haskell, C#
|
||||||
|
- A GF shell which uses the C runtime
|
||||||
|
- Better error messages
|
||||||
|
- GF now has a Stack configuration file
|
||||||
|
- The compiler source code has been updated for compatibility with GHC 8.4.3.
|
||||||
|
- `GF_LIB_PATH` can now be `path1:path2:path3`, not just `path1`
|
||||||
|
- Add TypeScript type definitions for `gflib.js`
|
||||||
|
- New compiler/shell options
|
||||||
|
- added option `-output-format=java` for producing code for embedded grammars in Java
|
||||||
|
- `rf -paragraphs`
|
||||||
|
- `linearize -tabtreebank`
|
||||||
|
- A new function called `completions` is added in the Haskell runtime and used in PGFService. This makes the extraction of completions more platform independent
|
||||||
|
|
||||||
|
## Resource Grammar Library
|
||||||
|
|
||||||
|
- [Bash build script](https://github.com/GrammaticalFramework/gf-rgl/blob/master/Setup.sh), for building the RGL without Haskell
|
||||||
|
- [Windows build script](https://github.com/GrammaticalFramework/gf-rgl/blob/master/Setup.bat), for building the RGL without Haskell on a regular Windows command shell
|
||||||
|
- New languages:
|
||||||
|
- Basque
|
||||||
|
- Portuguese
|
||||||
|
- Big progress with Arabic, Turkish, Persian
|
||||||
|
- Introduction of `Extend` module to combine the functions of `Extra` and `Extensions` in a more disciplined way
|
||||||
|
- Various fixes for several languages.
|
||||||
|
- Various fixes in the translation dictionaries.
|
||||||
|
|
||||||
|
## Apps and Cloud services
|
||||||
|
|
||||||
|
- Sort list of public grammars by age by default
|
||||||
|
- Browser compatibility fixes
|
||||||
|
- Allow public grammars to be deleted in more cases
|
||||||
|
- Show grammar comments in the list of public grammars
|
||||||
@@ -1,9 +1,6 @@
|
|||||||
GF Version 3.2 Release Notes
|
GF Version 3.2 Release Notes
|
||||||
December 2010
|
December 2010
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <H1> <H1><IMG src="../doc/Logos/gf0.png">
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -34,8 +31,3 @@ See the [download page http://www.grammaticalframework.org/download/index.html].
|
|||||||
|
|
||||||
- GF compiler: GPL
|
- GF compiler: GPL
|
||||||
- Run-time libraries and Resource Grammar Library: LGPL + BSD
|
- Run-time libraries and Resource Grammar Library: LGPL + BSD
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF Version 3.3.3 Release Notes
|
GF Version 3.3.3 Release Notes
|
||||||
March 2012
|
March 2012
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><IMG src="../doc/Logos/gf0.png">
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -25,8 +21,3 @@ See the [download page http://www.grammaticalframework.org/download/index.html].
|
|||||||
- Fix for a bug that prevented the shell commands ``abstract_info``,
|
- Fix for a bug that prevented the shell commands ``abstract_info``,
|
||||||
``generate_random`` and ``generate_trees`` from working properly.
|
``generate_random`` and ``generate_trees`` from working properly.
|
||||||
- Various other small improvements and bug fixes.
|
- Various other small improvements and bug fixes.
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF Version 3.3 Release Notes
|
GF Version 3.3 Release Notes
|
||||||
October 2011
|
October 2011
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta name = "viewport" content = "width = device-width"><TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><IMG src="../doc/Logos/gf0.png">
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -29,8 +25,3 @@ See the [download page http://www.grammaticalframework.org/download/index.html].
|
|||||||
and the web-based grammar editor.
|
and the web-based grammar editor.
|
||||||
- Faster grammar compilation (also included in the GF 3.2.9 source-only
|
- Faster grammar compilation (also included in the GF 3.2.9 source-only
|
||||||
snapshot).
|
snapshot).
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF Version 3.4 Release Notes
|
GF Version 3.4 Release Notes
|
||||||
January 2013
|
January 2013
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -46,8 +42,3 @@ See the [download page http://www.grammaticalframework.org/download/index.html].
|
|||||||
- Some new functionality in the web-based grammar editor, e.g. preliminary
|
- Some new functionality in the web-based grammar editor, e.g. preliminary
|
||||||
support for public grammars.
|
support for public grammars.
|
||||||
- Various other small improvements and bug fixes.
|
- Various other small improvements and bug fixes.
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF 3.5 Release Notes
|
GF 3.5 Release Notes
|
||||||
August 2013
|
August 2013
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -70,7 +66,3 @@ of GF 3.4.
|
|||||||
[``network-2.4.1.1`` https://github.com/haskell/network/commit/f2168b1f8978b4ad9c504e545755f0795ac869ce].
|
[``network-2.4.1.1`` https://github.com/haskell/network/commit/f2168b1f8978b4ad9c504e545755f0795ac869ce].
|
||||||
- Various other small improvements and bug fixes.
|
- Various other small improvements and bug fixes.
|
||||||
%- [...]
|
%- [...]
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF 3.6 Release Notes
|
GF 3.6 Release Notes
|
||||||
June 2014
|
June 2014
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -107,8 +103,3 @@ Closed [issues http://code.google.com/p/grammatical-framework/issues/list]:
|
|||||||
- ``c-wordforword``: this works as ``c-translate`` but does a
|
- ``c-wordforword``: this works as ``c-translate`` but does a
|
||||||
word-for-word lookup to create a (potentially very low quality)
|
word-for-word lookup to create a (potentially very low quality)
|
||||||
translation that can be used if all else fails.
|
translation that can be used if all else fails.
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF 3.7.1 Release Notes
|
GF 3.7.1 Release Notes
|
||||||
October 2015
|
October 2015
|
||||||
|
|
||||||
%!style:../css/notes.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -77,8 +73,3 @@ Over 170 changes have been pushed to the source repository since
|
|||||||
you can leave ``&+`` uninterpreted instead of gluing the adjacent tokens.
|
you can leave ``&+`` uninterpreted instead of gluing the adjacent tokens.
|
||||||
This means that the output is left in a format that can be parsed in
|
This means that the output is left in a format that can be parsed in
|
||||||
a subsequent request.
|
a subsequent request.
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF 3.7 Release Notes
|
GF 3.7 Release Notes
|
||||||
June 2015
|
June 2015
|
||||||
|
|
||||||
%!style:../css/style.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -103,8 +99,3 @@ Over 800 changes have been pushed to the source repository since
|
|||||||
unused for 24 hours, to keep memory use down in long running servers.
|
unused for 24 hours, to keep memory use down in long running servers.
|
||||||
- PGF service: limit the number of parallel calls to the C run-time parse
|
- PGF service: limit the number of parallel calls to the C run-time parse
|
||||||
function to 4 by default. The limit can be changed with the ``-j`` flag.
|
function to 4 by default. The limit can be changed with the ``-j`` flag.
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF 3.8 Release Notes
|
GF 3.8 Release Notes
|
||||||
June 2016
|
June 2016
|
||||||
|
|
||||||
%!style:../css/notes.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -100,7 +96,3 @@ Roughly 400 changes have been pushed to the source repository since
|
|||||||
translations in the domain they cover.
|
translations in the domain they cover.
|
||||||
You can change the order in which the selected grammars are tried
|
You can change the order in which the selected grammars are tried
|
||||||
by dragging them up and down in the list.
|
by dragging them up and down in the list.
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
GF 3.9 Release Notes
|
GF 3.9 Release Notes
|
||||||
August 2017
|
August 2017
|
||||||
|
|
||||||
%!style:../css/notes.css
|
|
||||||
%!postproc(html): <TITLE> <meta charset="UTF-8"><meta name = "viewport" content = "width = device-width"> <TITLE>
|
|
||||||
%!postproc(html): <H1> <H1><a href="../"><IMG src="../doc/Logos/gf0.png"></a>
|
|
||||||
|
|
||||||
==Installation==
|
==Installation==
|
||||||
|
|
||||||
See the [download page http://www.grammaticalframework.org/download/index.html].
|
See the [download page http://www.grammaticalframework.org/download/index.html].
|
||||||
@@ -66,8 +62,3 @@ the full functionality of the C runtime.
|
|||||||
|
|
||||||
- PGF service: support for language-specific dependency configurations in
|
- PGF service: support for language-specific dependency configurations in
|
||||||
``command=deptree``.
|
``command=deptree``.
|
||||||
|
|
||||||
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
[www.grammaticalframework.org http://www.grammaticalframework.org]
|
|
||||||
|
|||||||
BIN
favicon.ico
Normal file
BIN
favicon.ico
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 15 KiB |
24
gf.cabal
24
gf.cabal
@@ -1,5 +1,5 @@
|
|||||||
name: gf
|
name: gf
|
||||||
version: 3.10
|
version: 3.10.3-git
|
||||||
|
|
||||||
cabal-version: >= 1.22
|
cabal-version: >= 1.22
|
||||||
build-type: Custom
|
build-type: Custom
|
||||||
@@ -11,7 +11,7 @@ description: GF, Grammatical Framework, is a programming language for multilingu
|
|||||||
homepage: http://www.grammaticalframework.org/
|
homepage: http://www.grammaticalframework.org/
|
||||||
bug-reports: https://github.com/GrammaticalFramework/gf-core/issues
|
bug-reports: https://github.com/GrammaticalFramework/gf-core/issues
|
||||||
maintainer: Thomas Hallgren
|
maintainer: Thomas Hallgren
|
||||||
tested-with: GHC==7.6.3, GHC==7.8.3, GHC==7.10.3, GHC==8.0.2
|
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.2.2, GHC==8.4.3
|
||||||
|
|
||||||
data-dir: src
|
data-dir: src
|
||||||
data-files:
|
data-files:
|
||||||
@@ -81,7 +81,8 @@ Library
|
|||||||
random,
|
random,
|
||||||
pretty,
|
pretty,
|
||||||
mtl,
|
mtl,
|
||||||
exceptions
|
exceptions,
|
||||||
|
ghc-prim
|
||||||
hs-source-dirs: src/runtime/haskell
|
hs-source-dirs: src/runtime/haskell
|
||||||
|
|
||||||
other-modules:
|
other-modules:
|
||||||
@@ -97,7 +98,8 @@ Library
|
|||||||
--if impl(ghc>=7.8)
|
--if impl(ghc>=7.8)
|
||||||
-- ghc-options: +RTS -A20M -RTS
|
-- ghc-options: +RTS -A20M -RTS
|
||||||
ghc-prof-options: -fprof-auto
|
ghc-prof-options: -fprof-auto
|
||||||
extensions:
|
if impl(ghc>=8.6)
|
||||||
|
Default-extensions: NoMonadFailDesugaring
|
||||||
|
|
||||||
exposed-modules:
|
exposed-modules:
|
||||||
PGF
|
PGF
|
||||||
@@ -141,8 +143,8 @@ Library
|
|||||||
|
|
||||||
---- GF compiler as a library:
|
---- GF compiler as a library:
|
||||||
|
|
||||||
build-depends: filepath, directory, time, time-compat, old-locale,
|
build-depends: filepath, directory>=1.2, time,
|
||||||
process, haskeline, parallel>=3
|
process, haskeline, parallel>=3, json
|
||||||
|
|
||||||
hs-source-dirs: src/compiler
|
hs-source-dirs: src/compiler
|
||||||
exposed-modules:
|
exposed-modules:
|
||||||
@@ -150,6 +152,7 @@ Library
|
|||||||
GF.Support
|
GF.Support
|
||||||
GF.Text.Pretty
|
GF.Text.Pretty
|
||||||
GF.Text.Lexing
|
GF.Text.Lexing
|
||||||
|
GF.Grammar.Canonical
|
||||||
|
|
||||||
other-modules:
|
other-modules:
|
||||||
GF.Main GF.Compiler GF.Interactive
|
GF.Main GF.Compiler GF.Interactive
|
||||||
@@ -188,7 +191,10 @@ Library
|
|||||||
GF.Compile.PGFtoJava
|
GF.Compile.PGFtoJava
|
||||||
GF.Haskell
|
GF.Haskell
|
||||||
GF.Compile.ConcreteToHaskell
|
GF.Compile.ConcreteToHaskell
|
||||||
|
GF.Compile.GrammarToCanonical
|
||||||
|
GF.Grammar.CanonicalJSON
|
||||||
GF.Compile.PGFtoJS
|
GF.Compile.PGFtoJS
|
||||||
|
GF.Compile.PGFtoJSON
|
||||||
GF.Compile.PGFtoProlog
|
GF.Compile.PGFtoProlog
|
||||||
GF.Compile.PGFtoPython
|
GF.Compile.PGFtoPython
|
||||||
GF.Compile.ReadFiles
|
GF.Compile.ReadFiles
|
||||||
@@ -267,7 +273,7 @@ Library
|
|||||||
cpp-options: -DC_RUNTIME
|
cpp-options: -DC_RUNTIME
|
||||||
|
|
||||||
if flag(server)
|
if flag(server)
|
||||||
build-depends: httpd-shed>=0.4.0.3, network>=2.3 && <2.7, json,
|
build-depends: httpd-shed>=0.4.0.3, network>=2.3 && <2.7,
|
||||||
cgi>=3001.2.2.0
|
cgi>=3001.2.2.0
|
||||||
if flag(network-uri)
|
if flag(network-uri)
|
||||||
build-depends: network-uri>=2.6, network>=2.6
|
build-depends: network-uri>=2.6, network>=2.6
|
||||||
@@ -287,7 +293,9 @@ Library
|
|||||||
CGIUtils
|
CGIUtils
|
||||||
Cache
|
Cache
|
||||||
Fold
|
Fold
|
||||||
hs-source-dirs: src/server src/server/transfer
|
ExampleDemo
|
||||||
|
ExampleService
|
||||||
|
hs-source-dirs: src/server src/server/transfer src/example-based
|
||||||
|
|
||||||
if flag(interrupt)
|
if flag(interrupt)
|
||||||
cpp-options: -DUSE_INTERRUPT
|
cpp-options: -DUSE_INTERRUPT
|
||||||
|
|||||||
709
index.html
709
index.html
@@ -1,418 +1,390 @@
|
|||||||
<!DOCTYPE html>
|
<!doctype html>
|
||||||
<HTML>
|
<html lang="en">
|
||||||
<HEAD>
|
<head>
|
||||||
<TITLE>GF - Grammatical Framework</TITLE>
|
<meta charset="utf-8">
|
||||||
<meta charset="UTF-8">
|
|
||||||
<link rel="stylesheet" href="css/newstyle.css" title="GF">
|
|
||||||
<link rel="alternate" href="https://github.com/GrammaticalFramework/gf-core/" title="GF GitHub repository">
|
|
||||||
<meta name = "viewport" content = "width = device-width">
|
|
||||||
<script type="text/javascript">
|
|
||||||
function sitesearch() {
|
|
||||||
var q=document.forms[0].q.value;
|
|
||||||
var site=" site:www.grammaticalframework.org";
|
|
||||||
var search=encodeURIComponent(q+site)
|
|
||||||
document.location.href="http://www.google.com/search?q="+search
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
</script>
|
|
||||||
<meta name="keywords" content="machine translation">
|
|
||||||
</HEAD>
|
|
||||||
|
|
||||||
<body class=new>
|
<title>GF - Grammatical Framework</title>
|
||||||
|
<meta name="keywords" content="machine translation">
|
||||||
|
|
||||||
<div class="header sky blue">
|
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
|
||||||
<img class="gflogo" src="doc/Logos/gf1.svg" alt="">
|
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css" integrity="sha384-MCw98/SFnGE8fJT3GXwEOngsV7Zt27NXFoaoApmYm81iuXoPkFOJwJ8ERdknLPMO" crossorigin="anonymous">
|
||||||
<H1>Grammatical Framework</H1>
|
<link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.4.2/css/all.css" integrity="sha384-/rXc/GQVaYpyDdyxK+ecHPVYJSN9bmVFBvjA/9eOB+pb3F2w2N6fc5qB9Ew5yIns" crossorigin="anonymous">
|
||||||
<small class=tagline>A programming language for multilingual grammar applications</small>
|
|
||||||
|
<link rel="alternate" href="https://github.com/GrammaticalFramework/gf-core/" title="GF GitHub repository">
|
||||||
|
</head>
|
||||||
|
|
||||||
|
<body>
|
||||||
|
<div class="container-fluid my-5" style="max-width:1200px">
|
||||||
|
|
||||||
|
<div class="text-center">
|
||||||
|
<img style="height:250px" src="doc/Logos/gf1.svg" alt="GF Logo">
|
||||||
|
<h1 class="display-4" style="text-shadow: 1px 1px 5px #999;">Grammatical Framework</h1>
|
||||||
|
<h4 class="text-black-50">A programming language for multilingual grammar applications</h4>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class=menu>
|
<div class="row my-4">
|
||||||
|
|
||||||
<div class=links>
|
<div class="col-sm-6 col-md-3">
|
||||||
<h4>Use GF</h4>
|
<h3>Get started</h3>
|
||||||
<ul>
|
<ul class="mb-2">
|
||||||
<li><a href="http://cloud.grammaticalframework.org/">GF Cloud<img class=right src="src/www/P/gf-cloud.png" alt="GF Cloud Service" title="GF Cloud Service"></a>
|
<li><a href="https://www.youtube.com/watch?v=x1LFbDQhbso">Google Tech Talk</a></li>
|
||||||
<li><A HREF="demos/index.html">Other Demos</A>
|
<li>
|
||||||
</ul>
|
<a href="http://cloud.grammaticalframework.org/">
|
||||||
<ul>
|
GF Cloud
|
||||||
<li><A HREF="http://www.grammaticalframework.org/download/index.html"><b>Download GF</b></A>
|
<img src="http://www.grammaticalframework.org/src/www/P/gf-cloud.png" style="height:30px" class="ml-2" alt="Cloud logo">
|
||||||
<li><a href="doc/gf-editor-modes.html">GF Editor Modes</a>
|
</a>
|
||||||
</ul>
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="doc/tutorial/gf-tutorial.html">Tutorial</a>
|
||||||
|
/
|
||||||
|
<a href="lib/doc/rgl-tutorial/index.html">RGL Tutorial</a>
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
<ul>
|
<a href="download/index.html" class="btn btn-primary ml-3">
|
||||||
<li><A HREF="http://groups.google.com/group/gf-dev">User Group</A>
|
<i class="fas fa-download mr-1"></i>
|
||||||
<li><a href="https://github.com/GrammaticalFramework/gf-core/issues">Bug Reports</a>
|
Download GF
|
||||||
(<a href="http://code.google.com/p/grammatical-framework/issues/list">old</a>)
|
</a>
|
||||||
</ul>
|
</div>
|
||||||
</div>
|
|
||||||
|
|
||||||
<div class=links>
|
<div class="col-sm-6 col-md-3">
|
||||||
<h4>Learn GF</h4>
|
<h3>Learn more</h3>
|
||||||
<ul>
|
|
||||||
<li><a href="https://www.youtube.com/watch?v=x1LFbDQhbso">Google Tech Talk</a>
|
|
||||||
<li><A HREF="doc/gf-quickstart.html">QuickStart</A>
|
|
||||||
<li><A HREF="doc/gf-reference.html">QuickRefCard</A>
|
|
||||||
<li><A HREF="doc/gf-shell-reference.html">GF Shell Reference</A>
|
|
||||||
<li><a href="http://school.grammaticalframework.org/"><b>GF Summer School</b></a>
|
|
||||||
</ul>
|
|
||||||
<ul>
|
|
||||||
<li><A HREF="gf-book">The GF Book</A>
|
|
||||||
<li><A HREF="doc/tutorial/gf-tutorial.html">GF Tutorial</A>
|
|
||||||
<li><A HREF="doc/gf-refman.html">Reference Manual</A>
|
|
||||||
<li><A HREF="http://www.molto-project.eu/sites/default/files/MOLTO_D2.3.pdf">Best Practices</A> <small>[PDF]</small>
|
|
||||||
</ul>
|
|
||||||
<ul>
|
|
||||||
<li><A HREF="lib/doc/synopsis.html">Library Synopsis</A>
|
|
||||||
<li><A HREF="doc/gf-lrec-2010.pdf">Library Tutorial</A> <small>[PDF]</small>
|
|
||||||
<li><A HREF="http://www.postcrashgames.com/gf_world/">Coverage Map</A>
|
|
||||||
</ul>
|
|
||||||
|
|
||||||
</div>
|
<ul class="mb-2">
|
||||||
<div class=links>
|
<li><a href="gf-book">The GF Book</a></li>
|
||||||
<h4>Develop GF</h4>
|
<li><a href="doc/gf-refman.html">Reference Manual</a></li>
|
||||||
<ul>
|
<li><a href="doc/gf-shell-reference.html">Shell Reference</a></li>
|
||||||
<li><a href="doc/gf-developers.html">GF Developers Guide</a>
|
<li><a href="http://www.molto-project.eu/sites/default/files/MOLTO_D2.3.pdf">Best Practices</a> <small>[PDF]</small></li>
|
||||||
<li><A HREF="https://github.com/GrammaticalFramework/">GF on GitHub</A>
|
</ul>
|
||||||
<li><a href="/~hallgren/gf-experiment/browse/">Browse Source Code</a>
|
|
||||||
<li><A HREF="doc/gf-people.html">Authors</A>
|
|
||||||
</ul>
|
|
||||||
<h4>Develop Applications</h4>
|
|
||||||
<ul>
|
|
||||||
<li><a href="http://hackage.haskell.org/package/gf-3.9/docs/PGF.html">PGF library API (Old Runtime)</a>
|
|
||||||
<li><a href="doc/runtime-api.html">PGF library API (New Runtime)</a>
|
|
||||||
<li><a href="https://github.com/GrammaticalFramework/gf-offline-translator/tree/master/android">GF on Android (new)</a>
|
|
||||||
<li><A HREF="/android/">GF on Android (old) </A>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
<div class=links>
|
|
||||||
<h4>Related to GF</h4>
|
|
||||||
<ul>
|
|
||||||
<li><A HREF="doc/gf-bibliography.html">Publications</A>
|
|
||||||
<li><A HREF="http://remu.grammaticalframework.org/">The REMU Project</A>
|
|
||||||
<li><A HREF="http://www.molto-project.eu">The MOLTO Project</A>
|
|
||||||
<li><a href="http://en.wikipedia.org/wiki/Grammatical_Framework">GF on Wikipedia</a>
|
|
||||||
<li><p><a href="Http://www.digitalgrammars.com/">Digital Grammars AB</a>
|
|
||||||
</ul>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
|
<a href="lib/doc/synopsis/index.html" class="btn btn-primary ml-3">
|
||||||
|
<i class="fab fa-readme mr-1"></i>
|
||||||
|
RGL Synopsis
|
||||||
|
</a>
|
||||||
|
</div>
|
||||||
|
|
||||||
<script type="text/javascript">
|
<div class="col-sm-6 col-md-3">
|
||||||
document.write('<div style="float: right; margin-top: 3ex;"> <form onsubmit="return sitesearch()" method=get action="http://www.google.com/search"> <input type=search name="q" placeholder="site search"> <input type=submit value="Search"> </form></div>')
|
<h3>Develop</h3>
|
||||||
</script>
|
<ul class="mb-2">
|
||||||
|
<li><a href="doc/gf-developers.html">Developers Guide</a></li>
|
||||||
|
<!-- <li><a href="/~hallgren/gf-experiment/browse/">Browse Source Code</a></li> -->
|
||||||
|
<li><a href="http://hackage.haskell.org/package/gf/docs/PGF.html">PGF library API (Haskell runtime)</a></li>
|
||||||
|
<li><a href="doc/runtime-api.html">PGF library API (C runtime)</a></li>
|
||||||
|
<li><a href="http://hackage.haskell.org/package/gf/docs/GF.html">GF compiler API</a></li>
|
||||||
|
<!-- <li><a href="src/ui/android/README">GF on Android (new)</a></li>
|
||||||
|
<li><a href="/android/">GF on Android (old) </a></li> -->
|
||||||
|
<li><a href="doc/gf-editor-modes.html">Text Editor Support</a></li>
|
||||||
|
</ul>
|
||||||
|
</div>
|
||||||
|
|
||||||
<H2 class=noclear>News</H2>
|
<div class="col-sm-6 col-md-3">
|
||||||
|
<h3>Contribute</h3>
|
||||||
<div class=news2>
|
<ul class="mb-2">
|
||||||
|
<li><a href="http://groups.google.com/group/gf-dev">Mailing List</a></li>
|
||||||
<table class=news>
|
<li><a href="https://github.com/GrammaticalFramework/gf-core/issues">Issue Tracker</a></li>
|
||||||
<tr><td>2018-07-25:<td>The GF repository has been split in two:
|
<li><a href="doc/gf-people.html">Authors</a></li>
|
||||||
<a href="https://github.com/GrammaticalFramework/gf-core">gf-core</a> and
|
<li><a href="http://school.grammaticalframework.org/2018/">Summer School</a></li>
|
||||||
<a href="https://github.com/GrammaticalFramework/gf-rgl">gf-rgl</a>.
|
</ul>
|
||||||
The original <a href="https://github.com/GrammaticalFramework/GF">GF</a> repository is now archived.
|
<a href="https://github.com/GrammaticalFramework/" class="btn btn-primary ml-3">
|
||||||
<tr><td>2017-08-11:<td><strong>GF 3.9 released!</strong>
|
<i class="fab fa-github mr-1"></i>
|
||||||
<a href="download/release-3.9.html">Release notes</a>.
|
GF on GitHub
|
||||||
<tr><td>2017-06-29:<td>GF is moving to <a href="https://github.com/GrammaticalFramework/GF/">GitHub</a>!
|
</a>
|
||||||
<tr><td>2017-03-13:<td><strong>GF Summer School in Riga (Latvia), 14-25 August 2017</strong>
|
</div>
|
||||||
<a href="http://school.grammaticalframework.org/2017/">Summer
|
|
||||||
School web page</a>.
|
|
||||||
<tr><td>2016-09-07:<td><strong>Google Tech Talk on GF</strong> <a
|
|
||||||
href="https://www.youtube.com/watch?v=x1LFbDQhbso">on Youtube</a>.
|
|
||||||
<tr><td>2016-07-05:<td>GitHub mirror temporarily not updated, due to server migration.
|
|
||||||
<tr><td>2016-06-22:<td><strong>GF 3.8 released!</strong>
|
|
||||||
<a href="download/release-3.8.html">Release notes</a>.
|
|
||||||
<tr><td>2015-06-21:<td><strong>Summer School in Rule-Based Machine
|
|
||||||
Translation in Alacant/Alicante (Spain), 11-21 July 2016</strong>
|
|
||||||
featuring GF, Apertium, Matxin, and TectoMT.
|
|
||||||
<a href="http://xixona.dlsi.ua.es/rbmt-summer-school/2016/">Summer
|
|
||||||
School web page</a>.
|
|
||||||
<tr><td>2016-06-14:<td>New resource grammar language: Nynorsk.
|
|
||||||
<tr><td>2015-10-02:<td><strong>GF 3.7.1 released!</strong>
|
|
||||||
<a href="download/release-3.7.1.html">Release notes</a>.
|
|
||||||
<tr><td>2015-06-25:<td><strong>GF 3.7 released!</strong>
|
|
||||||
<a href="download/release-3.7.html">Release notes</a>.
|
|
||||||
<tr><td>2015-03-13:<td>New resource grammar language: Mongolian.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2015-02-09:<td><strong>GF Summer School in Gozo (Malta), 13-24 July 2015</strong>
|
|
||||||
<a href="http://school.grammaticalframework.org/2015/">Summer
|
|
||||||
School web page</a>.
|
|
||||||
<tr><td>2014-06-23:<td><strong>GF 3.6 released!</strong>
|
|
||||||
<a href="download/release-3.6.html">Release notes</a>.
|
|
||||||
<tr><td>2014-03-11:
|
|
||||||
<td>A company for commercial applications of GF has been founded:
|
|
||||||
<a href="http://www.digitalgrammars.com/">Digital Grammars</a>.
|
|
||||||
<tr><td>2013-11-25:
|
|
||||||
<td>The default character encoding in GF grammar files will be changed
|
|
||||||
from Latin-1 to UTF-8. See
|
|
||||||
<a href="download/encoding-change.html">GF character encoding changes</a>
|
|
||||||
for details.
|
|
||||||
<tr><td>2013-10-18:<td>New resource grammar language: Estonian.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2013-09-18:<td>New <a href="https://github.com/GrammaticalFramework/gf-contrib">GF contributions repository</a>, hosted on GitHub.
|
|
||||||
<tr><td>2013-08-06:<td><strong>GF 3.5 released!</strong>
|
|
||||||
<a href="download/release-3.5.html">Release notes</a>.
|
|
||||||
<tr><td>2013-07-26:<td>Started a page with <A HREF="lib/doc/rgl-publications.html">RGL Documentation and Publications</A>.
|
|
||||||
<tr><td>2013-06-24:<td>We are now running the IRC channel <a href="https://webchat.freenode.net/?channels=gf"><strong><code>#gf</code></strong></a> on the Freenode network.
|
|
||||||
<tr><td>2013-06-19:<td>New resource grammar language: Maltese.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2013-04-25:<td>New resource grammar language: Greek.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2013-01-31:<td><strong>GF 3.4 released!</strong>
|
|
||||||
<a href="download/release-3.4.html">Release notes</a>.
|
|
||||||
<tr><td>2012-12-10:<td>
|
|
||||||
<a href="http://www.postcrashgames.com/gf_world/">Resource Grammar Library
|
|
||||||
coverage map</a>, created by Tommi Nieminen.
|
|
||||||
<!--
|
|
||||||
<tr><td>2012-11-18:<td>
|
|
||||||
<A HREF="http://school.grammaticalframework.org/2013">GF Summer School</A>
|
|
||||||
in Frauenchiemsee, 18-30 August 2013.
|
|
||||||
<tr><td>2012-11-18:<td>New resource grammar language: Chinese.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
Complete but not yet perfect.
|
|
||||||
<tr><td>2012-06-29:<td>GF sources now mirrored in GitHub, with change
|
|
||||||
statistics and other browsing features.
|
|
||||||
See <a href="https://github.com/GrammaticalFramework/GF/"><tt>github.com/GrammaticalFramework/GF</tt></a>
|
|
||||||
<tr><td>2012-05-07:<td>New resource grammar language: Japanese.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2012-03-23:<td>There will be a
|
|
||||||
<a href="gf-tutorial-icfp-2012/">GF tutorial at ICFP 2012</a>
|
|
||||||
in Copenhagen.
|
|
||||||
<tr><td>2012-03-03:<td><strong>GF 3.3.3 released!</strong>
|
|
||||||
<a href="download/release-3.3.3.html">Release notes</a>.
|
|
||||||
<tr><td>2012-02-24:<td>New resource grammar languages: Hindi, Sindhi.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2011-12-29:<td>New resource grammar languages: Latvian, Thai.
|
|
||||||
See <a href="lib/doc/synopsis.html">library synopsis</a>.
|
|
||||||
<tr><td>2011-10-27:<td><strong>GF 3.3 released!</strong>
|
|
||||||
<a href="download/release-3.3.html">Release notes</a>.
|
|
||||||
<tr><td>2011-09-20:<td>There is now a page collecting
|
|
||||||
<a href="doc/gf-editor-modes.html">editor modes for GF</a>.
|
|
||||||
Contributions are welcome!
|
|
||||||
<tr><td>2011-09-12:<td><strong>GF 3.2.9</strong> source snapshot with faster grammar compilation available. See <a href="download/index.html">Downloads</a>.
|
|
||||||
<tr><td>2011-04-22:<td><a href="android/tutorial/">JPGF Android Tutorial</a> added.
|
|
||||||
<tr><td>2011-04-15:<td>The <a href="gf-book">GF Book</a> is available.
|
|
||||||
<tr><td>2011-01-13:<td><a href="http://www.molto-project.eu/node/1177">Phrasedroid
|
|
||||||
available on the Android Market</a>.
|
|
||||||
<tr><td>2011-01-04:<td>GF is part of the
|
|
||||||
<a href="http://www.clt.gu.se/clt-toolkit">CLT Toolkit</a>.
|
|
||||||
<tr><td>2010-12-23:<td><strong>GF 3.2 released!</strong>
|
|
||||||
<a href="download/release-3.2.html">Release notes</a>.
|
|
||||||
-->
|
|
||||||
</table>
|
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<H2>What is GF</H2>
|
<h2>What is GF?</h2>
|
||||||
<P>
|
<p>
|
||||||
GF, Grammatical Framework, is a programming language for
|
GF, Grammatical Framework, is a programming language for
|
||||||
<B>multilingual grammar applications</B>. It is
|
<strong>multilingual grammar applications</strong>. It is
|
||||||
</P>
|
</p>
|
||||||
<UL>
|
<ul>
|
||||||
<LI>a <B>special-purpose language for grammars</B>, like
|
<li>a <strong>special-purpose language for grammars</strong>, like
|
||||||
<a href="http://dinosaur.compilertools.net/yacc/">YACC</a>,
|
<a href="http://dinosaur.compilertools.net/yacc/">YACC</a>,
|
||||||
<a href="http://www.gnu.org/software/bison/">Bison</a>,
|
<a href="http://www.gnu.org/software/bison/">Bison</a>,
|
||||||
<a href="http://www.haskell.org/happy/">Happy</a>,
|
<a href="http://www.haskell.org/happy/">Happy</a>,
|
||||||
<a href="http://bnfc.digitalgrammars.com/">BNFC</a>,
|
<a href="http://bnfc.digitalgrammars.com/">BNFC</a>,
|
||||||
but not restricted to programming languages
|
but not restricted to programming languages
|
||||||
<LI>a <B>functional programming language</B>, like
|
</li>
|
||||||
|
<li>a <strong>functional programming language</strong>, like
|
||||||
<a href="http://www.haskell.org/">Haskell</a>,
|
<a href="http://www.haskell.org/">Haskell</a>,
|
||||||
<a href="http://en.wikipedia.org/wiki/Lisp_(programming_language)">Lisp</a>,
|
<a href="http://en.wikipedia.org/wiki/Lisp_(programming_language)">Lisp</a>,
|
||||||
<a href="http://ocaml.org/">OCaml</a>,
|
<a href="http://ocaml.org/">OCaml</a>,
|
||||||
<a href="http://www.smlnj.org/">SML</a>,
|
<a href="http://www.smlnj.org/">SML</a>,
|
||||||
<a href="http://schemers.org/">Scheme</a>,
|
<a href="http://schemers.org/">Scheme</a>,
|
||||||
but specialized to grammar writing
|
but specialized to grammar writing
|
||||||
<LI>a <B>development platform for natural language grammars</B>, like
|
</li>
|
||||||
|
<li>a <strong>development platform for natural language grammars</strong>, like
|
||||||
<a href="http://moin.delph-in.net/LkbTop">LKB</a>,
|
<a href="http://moin.delph-in.net/LkbTop">LKB</a>,
|
||||||
<a href="http://www2.parc.com/isl/groups/nltt/xle/">XLE</a>,
|
<a href="http://www2.parc.com/isl/groups/nltt/xle/">XLE</a>,
|
||||||
<a href="http://www.issco.unige.ch/en/research/projects/regulus/news.shtml">Regulus</a>,
|
<a href="http://www.issco.unige.ch/en/research/projects/regulus/news.shtml">Regulus</a>,
|
||||||
but based on functional programming and type theory
|
but based on functional programming and type theory
|
||||||
<LI>a <B>categorial grammar formalism</B>, like
|
</li>
|
||||||
|
<li>a <strong>categorial grammar formalism</strong>, like
|
||||||
<a href="http://www.loria.fr/equipes/calligramme/acg/">ACG</a>,
|
<a href="http://www.loria.fr/equipes/calligramme/acg/">ACG</a>,
|
||||||
<a href="http://openccg.sourceforge.net/">CCG</a>,
|
<a href="http://openccg.sourceforge.net/">CCG</a>,
|
||||||
but specialized for multilingual grammars,
|
but specialized for multilingual grammars,
|
||||||
<LI>a <B>logical framework</B>, like
|
</li>
|
||||||
|
<li>a <strong>logical framework</strong>, like
|
||||||
<a href="http://wiki.portal.chalmers.se/agda/pmwiki.php">Agda</a>,
|
<a href="http://wiki.portal.chalmers.se/agda/pmwiki.php">Agda</a>,
|
||||||
<a href="http://coq.inria.fr/">Coq</a>,
|
<a href="http://coq.inria.fr/">Coq</a>,
|
||||||
<a href="http://www.cl.cam.ac.uk/research/hvg/Isabelle/">Isabelle</a>,
|
<a href="http://www.cl.cam.ac.uk/research/hvg/Isabelle/">Isabelle</a>,
|
||||||
but equipped with concrete syntax in addition to logic
|
but equipped with concrete syntax in addition to logic
|
||||||
<li>a <b>platform for machine translation</b>, like
|
</li>
|
||||||
|
<li>a <strong>platform for machine translation</strong>, like
|
||||||
<a href="http://www.statmt.org/moses/">Moses</a>,
|
<a href="http://www.statmt.org/moses/">Moses</a>,
|
||||||
<a href="http://www.apertium.org/">Apertium</a>,
|
<a href="http://www.apertium.org/">Apertium</a>,
|
||||||
but based on deep structural analysis (and usually applied for
|
but based on deep structural analysis (and usually applied for
|
||||||
limited fragments of language).
|
limited fragments of language).
|
||||||
</UL>
|
</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
<P>
|
<p>
|
||||||
Don't worry if you don't know most of the references above - but if you do know at
|
Don't worry if you don't know most of the references above - but if you do know at
|
||||||
least one, it may help you to get a first idea of what GF is.
|
least one, it may help you to get a first idea of what GF is.
|
||||||
</P>
|
|
||||||
<H2>Applications</H2>
|
|
||||||
<P>
|
|
||||||
GF can be used for building
|
|
||||||
</P>
|
|
||||||
<UL>
|
|
||||||
<LI><A HREF="http://cloud.grammaticalframework.org/translator/">translation systems</A>
|
|
||||||
<LI><A HREF="http://cloud.grammaticalframework.org/minibar/minibar.html">multilingual web gadgets</A>
|
|
||||||
<LI><A HREF="http://www.cs.chalmers.se/~hallgren/Alfa/Tutorial/GFplugin.html">natural-language interfaces</A>
|
|
||||||
<LI><A HREF="http://www.youtube.com/watch?v=1bfaYHWS6zU">dialogue systems</A>
|
|
||||||
<LI><A HREF="lib/doc/synopsis.html">natural language resources</A>
|
|
||||||
</UL>
|
|
||||||
|
|
||||||
<H2>Availability</H2>
|
|
||||||
<P>
|
|
||||||
GF is <B>open-source</B>, licensed under <A HREF="LICENSE">GPL</A> (the program) and
|
|
||||||
<A HREF="./LICENSE">LGPL</A> and <A HREF="./LICENSE">BSD</A> (the libraries). It
|
|
||||||
is available for
|
|
||||||
</P>
|
|
||||||
<UL>
|
|
||||||
<LI>Linux
|
|
||||||
<LI>Mac OS X
|
|
||||||
<LI>Windows
|
|
||||||
<li>Android mobile platform (via Java; runtime)
|
|
||||||
<LI>via compilation to JavaScript, almost any platform that has a web browser (runtime)
|
|
||||||
</UL>
|
|
||||||
|
|
||||||
<H2>Projects</H2>
|
|
||||||
<P>
|
|
||||||
GF was first created in 1998 at
|
|
||||||
<A HREF="http://www.xrce.xerox.com/">Xerox Research Centre Europe</A>,
|
|
||||||
Grenoble, in the project
|
|
||||||
Multilingual Document Authoring. At Xerox, it was used for prototypes including
|
|
||||||
a restaurant phrase book,
|
|
||||||
a database query system,
|
|
||||||
a formalization of an alarm system instructions with translations to 5 languages, and
|
|
||||||
an authoring system for medical drug descriptions.
|
|
||||||
</P>
|
|
||||||
<P>
|
|
||||||
Later projects using GF and involving third parties include, in chronological order,
|
|
||||||
</P>
|
|
||||||
<UL>
|
|
||||||
<LI><A HREF="http://www.cs.chalmers.se/~hallgren/Alfa/Tutorial/GFplugin.html">GF-Alfa</A>:
|
|
||||||
natural language interface to formal proofs
|
|
||||||
<LI><A HREF="http://efficient.citi.tudor.lu/index_noframe.html">Efficient</A>:
|
|
||||||
authoring tool for business models.
|
|
||||||
<LI><A HREF="http://www.key-project.org/">GF-KeY</A>:
|
|
||||||
authoring and translation of software specifications
|
|
||||||
<LI><A HREF="http://www.talk-project.org">TALK</A>:
|
|
||||||
multilingual and multimodal spoken dialogue systems
|
|
||||||
<LI><A HREF="http://webalt.math.helsinki.fi/">WebALT</A>:
|
|
||||||
multilingual generation of mathematical exercises (commercial project)
|
|
||||||
<LI><A HREF="http://spraakbanken.gu.se/sal/">SALDO</A>:
|
|
||||||
Swedish morphological dictionary based on rules developed for GF and
|
|
||||||
<A HREF="http://www.cse.chalmers.se/alumni/markus/FM/">Functional Morphology</A>
|
|
||||||
<LI><a href="http://www.molto-project.eu">MOLTO</a>:
|
|
||||||
multilingual online translation
|
|
||||||
<LI><a href="http://remu.grammaticalframework.org">REMU</a>:
|
|
||||||
reliable multilingual digital communication.
|
|
||||||
</UL>
|
|
||||||
<p>
|
|
||||||
Here is a <a
|
|
||||||
href="http://videos.xrce.xerox.com/index.php/videos/index/618">talk
|
|
||||||
about GF at XRCE</a>,
|
|
||||||
14 years later.
|
|
||||||
|
|
||||||
<P>
|
|
||||||
Academically, GF has been used in at least ten PhD theses and resulted
|
|
||||||
in more than a hundred
|
|
||||||
scientific publications (see <A HREF="doc/gf-bibliography.html">GF publication list</A>).
|
|
||||||
</P>
|
|
||||||
<H2>Programming in GF</H2>
|
|
||||||
<P>
|
|
||||||
GF is easy to learn by following the <A HREF="doc/tutorial/gf-tutorial.html">tutorial</A>.
|
|
||||||
You can write your first translator in 15 minutes.
|
|
||||||
</P>
|
|
||||||
<P>
|
|
||||||
GF has an interactive command interpreter, as well as a batch compiler.
|
|
||||||
Grammars can be compiled to parser and translator code in many different
|
|
||||||
formats. These components can then be embedded in applications written
|
|
||||||
in other programming languages. The formats currently supported are:
|
|
||||||
</P>
|
|
||||||
<UL>
|
|
||||||
<LI>Haskell
|
|
||||||
<li>Java, in particular the Android platform
|
|
||||||
<LI>JavaScript
|
|
||||||
<LI>Speech recognition: HTK/ATK, Nuance, JSGF
|
|
||||||
</UL>
|
|
||||||
|
|
||||||
<P>
|
|
||||||
The GF programming language is high-level and advanced, featuring
|
|
||||||
</P>
|
|
||||||
<UL>
|
|
||||||
<LI>static type checking
|
|
||||||
<LI>higher-order functions
|
|
||||||
<LI>dependent types
|
|
||||||
<LI>pattern matching with data constructors and regular expressions
|
|
||||||
<LI>module system with multiple inheritance and parametrized modules
|
|
||||||
</UL>
|
|
||||||
|
|
||||||
<h3>Getting help</h3>
|
|
||||||
<p>
|
|
||||||
If you need some help with GF, the first places to start are the <a href="doc/tutorial/gf-tutorial.html">Tutorial</a> and <a href="doc/gf-refman.html">Reference</a> pages.
|
|
||||||
The printed <a href="gf-book">book</a> contains all the material in the tutorial and some extra bits, and is the recommended reference for GF.
|
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p>
|
<hr>
|
||||||
We run the IRC channel <strong><code>#gf</code></strong> on the Freenode network, where you are welcome to look for help with small questions or just start a general discussion.
|
|
||||||
IRC logs (in raw format) are available <a href="irc/">here</a>.
|
|
||||||
If you have a larger question which the community may benefit from, we recommend you ask it on the <a href="http://groups.google.com/group/gf-dev">mailing list</a>.
|
|
||||||
</p>
|
|
||||||
|
|
||||||
<H2>Libraries</H2>
|
<div class="row">
|
||||||
<P>
|
|
||||||
Libraries are at the heart of modern software engineering. In natural language
|
|
||||||
applications, libraries are a way to cope with thousands of details involved in
|
|
||||||
syntax, lexicon, and inflection. The
|
|
||||||
<A HREF="lib/doc/synopsis.html">GF resource grammar library</A> has
|
|
||||||
support for an increasing number of languages, currently including
|
|
||||||
</P>
|
|
||||||
<ol class=languages>
|
|
||||||
<LI>Afrikaans
|
|
||||||
<LI>Amharic (partial)
|
|
||||||
<LI>Arabic (partial)
|
|
||||||
<LI>Bulgarian
|
|
||||||
<LI>Catalan
|
|
||||||
<LI>Chinese
|
|
||||||
<LI>Danish
|
|
||||||
<LI>Dutch
|
|
||||||
<LI>English
|
|
||||||
<LI>Estonian
|
|
||||||
<LI>Finnish
|
|
||||||
<LI>French
|
|
||||||
<LI>German
|
|
||||||
<li>Greek ancient (partial)
|
|
||||||
<li>Greek modern
|
|
||||||
<li>Hebrew (fragments)
|
|
||||||
<LI>Hindi
|
|
||||||
<LI><A HREF="http://www.interlingua.com/">Interlingua</A>
|
|
||||||
<LI>Japanese
|
|
||||||
<LI>Italian
|
|
||||||
<LI>Latin (fragments)
|
|
||||||
<LI>Latvian
|
|
||||||
<li>Maltese
|
|
||||||
<li>Mongolian
|
|
||||||
<LI>Nepali
|
|
||||||
<LI>Norwegian bokmål
|
|
||||||
<LI>Norwegian nynorsk
|
|
||||||
<LI>Persian
|
|
||||||
<LI>Polish
|
|
||||||
<li>Punjabi
|
|
||||||
<LI>Romanian
|
|
||||||
<LI>Russian
|
|
||||||
<LI>Sindhi
|
|
||||||
<LI>Slovene (partial)
|
|
||||||
<LI>Spanish
|
|
||||||
<li>Swahili (fragments)
|
|
||||||
<LI>Swedish
|
|
||||||
<LI>Thai
|
|
||||||
<LI>Turkish (fragments)
|
|
||||||
<LI>Urdu
|
|
||||||
</ol>
|
|
||||||
|
|
||||||
<P>
|
<div class="col-md-6">
|
||||||
Adding a language to the resource library takes 3 to 9
|
<h2>Applications & Availability</h2>
|
||||||
months - contributions
|
<p>
|
||||||
are welcome! You can start with the <A HREF="doc/gf-lrec-2010.pdf">resource grammarian's tutorial</A>.
|
GF can be used for building
|
||||||
|
<a href="http://cloud.grammaticalframework.org/translator/">translation systems</a>,
|
||||||
|
<a href="http://cloud.grammaticalframework.org/minibar/minibar.html">multilingual web gadgets</a>,
|
||||||
|
<a href="http://www.cs.chalmers.se/~hallgren/Alfa/Tutorial/GFplugin.html">natural-language interfaces</a>,
|
||||||
|
<a href="http://www.youtube.com/watch?v=1bfaYHWS6zU">dialogue systems</a>, and
|
||||||
|
<a href="lib/doc/synopsis/index.html">natural language resources</a>.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
GF is <strong>open-source</strong>, licensed under <a href="LICENSE">GPL</a> (the program) and
|
||||||
|
<a href="LICENSE">LGPL</a> and <a href="LICENSE">BSD</a> (the libraries). It
|
||||||
|
is available for
|
||||||
|
</p>
|
||||||
|
<ul>
|
||||||
|
<li>Linux</li>
|
||||||
|
<li>macOS</li>
|
||||||
|
<li>Windows</li>
|
||||||
|
<li>Android mobile platform (via Java; runtime)</li>
|
||||||
|
<li>via compilation to JavaScript, almost any platform that has a web browser (runtime)</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
<h2>Programming in GF</h2>
|
||||||
|
<p>
|
||||||
|
GF is easy to learn by following the <a href="doc/tutorial/gf-tutorial.html">tutorial</a>.
|
||||||
|
You can write your first translator in 15 minutes.
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
GF has an interactive command interpreter, as well as a batch compiler.
|
||||||
|
Grammars can be compiled to parser and translator code in many different
|
||||||
|
formats. These components can then be embedded in applications written
|
||||||
|
in other programming languages. The formats currently supported are:
|
||||||
|
</p>
|
||||||
|
<ul>
|
||||||
|
<li>Haskell</li>
|
||||||
|
<li>Java, in particular the Android platform</li>
|
||||||
|
<li>JavaScript</li>
|
||||||
|
<li>Speech recognition: HTK/ATK, Nuance, JSGF</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
The GF programming language is high-level and advanced, featuring:
|
||||||
|
</p>
|
||||||
|
<ul>
|
||||||
|
<li>static type checking</li>
|
||||||
|
<li>higher-order functions</li>
|
||||||
|
<li>dependent types</li>
|
||||||
|
<li>pattern matching with data constructors and regular expressions</li>
|
||||||
|
<li>module system with multiple inheritance and parametrized modules</li>
|
||||||
|
</ul>
|
||||||
|
|
||||||
|
<h3>Getting help</h3>
|
||||||
|
<p>
|
||||||
|
If you need some help with GF, the first places to start are the <a href="doc/tutorial/gf-tutorial.html">Tutorial</a> and <a href="doc/gf-refman.html">Reference</a> pages.
|
||||||
|
The printed <a href="gf-book">book</a> contains all the material in the tutorial and some extra bits, and is the recommended reference for GF.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
We run the IRC channel <strong><code>#gf</code></strong> on the Freenode network, where you are welcome to look for help with small questions or just start a general discussion.
|
||||||
|
You can <a href="https://webchat.freenode.net/?channels=gf">open a web chat</a>
|
||||||
|
or <a href="http://www.grammaticalframework.org/irc/">browse the channel logs</a>.
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
If you have a larger question which the community may benefit from, we recommend you ask it on the <a href="http://groups.google.com/group/gf-dev">mailing list</a>.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
|
||||||
|
<div class="col-md-6">
|
||||||
|
<h2>News</h2>
|
||||||
|
|
||||||
|
<dl class="row">
|
||||||
|
<dt class="col-sm-3 text-center text-nowrap">2018-12-03</dt>
|
||||||
|
<dd class="col-sm-9">
|
||||||
|
<a href="http://school.grammaticalframework.org/2018/">Sixth GF Summer School</a> in Stellenbosch (South Africa), 3–14 December 2018
|
||||||
|
</dd>
|
||||||
|
<dt class="col-sm-3 text-center text-nowrap">2018-12-02</dt>
|
||||||
|
<dd class="col-sm-9">
|
||||||
|
<strong>GF 3.10 released.</strong>
|
||||||
|
<a href="download/release-3.10.html">Release notes</a>
|
||||||
|
</dd>
|
||||||
|
<dt class="col-sm-3 text-center text-nowrap">2018-07-25</dt>
|
||||||
|
<dd class="col-sm-9">
|
||||||
|
The GF repository has been split in two:
|
||||||
|
<a href="https://github.com/GrammaticalFramework/gf-core">gf-core</a> and
|
||||||
|
<a href="https://github.com/GrammaticalFramework/gf-rgl">gf-rgl</a>.
|
||||||
|
The original <a href="https://github.com/GrammaticalFramework/GF">GF</a> repository is now archived.
|
||||||
|
</dd>
|
||||||
|
<dt class="col-sm-3 text-center text-nowrap">2017-08-11</dt>
|
||||||
|
<dd class="col-sm-9">
|
||||||
|
<strong>GF 3.9 released.</strong>
|
||||||
|
<a href="download/release-3.9.html">Release notes</a>
|
||||||
|
</dd>
|
||||||
|
<dt class="col-sm-3 text-center text-nowrap">2017-06-29</dt>
|
||||||
|
<dd class="col-sm-9">
|
||||||
|
GF is moving to <a href="https://github.com/GrammaticalFramework/GF/">GitHub</a>.</dd>
|
||||||
|
<dt class="col-sm-3 text-center text-nowrap">2017-03-13</dt>
|
||||||
|
<dd class="col-sm-9">
|
||||||
|
<a href="http://school.grammaticalframework.org/2017/">GF Summer School</a> in Riga (Latvia), 14-25 August 2017
|
||||||
|
</dd>
|
||||||
|
</dl>
|
||||||
|
|
||||||
|
<h2>Projects</h2>
|
||||||
|
<p>
|
||||||
|
GF was first created in 1998 at
|
||||||
|
<a href="http://www.xrce.xerox.com/">Xerox Research Centre Europe</a>,
|
||||||
|
Grenoble, in the project
|
||||||
|
Multilingual Document Authoring. At Xerox, it was used for prototypes including
|
||||||
|
a restaurant phrase book,
|
||||||
|
a database query system,
|
||||||
|
a formalization of an alarm system instructions with translations to 5 languages, and
|
||||||
|
an authoring system for medical drug descriptions.
|
||||||
|
</p>
|
||||||
|
<p>
|
||||||
|
Later projects using GF and involving third parties include, in chronological order:
|
||||||
|
</p>
|
||||||
|
<ul>
|
||||||
|
<li>
|
||||||
|
<a href="http://www.cs.chalmers.se/~hallgren/Alfa/Tutorial/GFplugin.html">GF-Alfa</a>:
|
||||||
|
natural language interface to formal proofs
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://efficient.citi.tudor.lu/index_noframe.html">Efficient</a>:
|
||||||
|
authoring tool for business models.
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://www.key-project.org/">GF-KeY</a>:
|
||||||
|
authoring and translation of software specifications
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://www.talk-project.org">TALK</a>:
|
||||||
|
multilingual and multimodal spoken dialogue systems
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://webalt.math.helsinki.fi/">WebALT</a>:
|
||||||
|
multilingual generation of mathematical exercises (commercial project)
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://spraakbanken.gu.se/sal/">SALDO</a>:
|
||||||
|
Swedish morphological dictionary based on rules developed for GF and
|
||||||
|
<a href="http://www.cse.chalmers.se/alumni/markus/FM/">Functional Morphology</a>
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://www.molto-project.eu">MOLTO</a>:
|
||||||
|
multilingual online translation
|
||||||
|
</li>
|
||||||
|
<li>
|
||||||
|
<a href="http://remu.grammaticalframework.org">REMU</a>:
|
||||||
|
reliable multilingual digital communication
|
||||||
|
</li>
|
||||||
|
</ul>
|
||||||
|
<!-- <p>
|
||||||
|
Here is a <a
|
||||||
|
href="http://videos.xrce.xerox.com/index.php/videos/index/618">talk
|
||||||
|
about GF at XRCE</a>,
|
||||||
|
14 years later.
|
||||||
|
</p> -->
|
||||||
|
|
||||||
|
<p>
|
||||||
|
Academically, GF has been used in at least ten PhD theses and resulted
|
||||||
|
in more than a hundred scientific publications.
|
||||||
|
<!-- (see <a href="doc/gf-bibliography.html">GF publication list</a>). -->
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<h2>Libraries</h2>
|
||||||
|
<p>
|
||||||
|
Libraries are at the heart of modern software engineering. In natural language
|
||||||
|
applications, libraries are a way to cope with thousands of details involved in
|
||||||
|
syntax, lexicon, and inflection. The
|
||||||
|
<a href="lib/doc/synopsis/index.html">GF resource grammar library</a> has
|
||||||
|
support for an increasing number of languages, currently including
|
||||||
|
Afrikaans,
|
||||||
|
Amharic (partial),
|
||||||
|
Arabic (partial),
|
||||||
|
Bulgarian,
|
||||||
|
Catalan,
|
||||||
|
Chinese,
|
||||||
|
Danish,
|
||||||
|
Dutch,
|
||||||
|
English,
|
||||||
|
Estonian,
|
||||||
|
Finnish,
|
||||||
|
French,
|
||||||
|
German,
|
||||||
|
Greek ancient (partial),
|
||||||
|
Greek modern,
|
||||||
|
Hebrew (fragments),
|
||||||
|
Hindi,
|
||||||
|
Interlingua,
|
||||||
|
Japanese,
|
||||||
|
Italian,
|
||||||
|
Latin (fragments),
|
||||||
|
Latvian,
|
||||||
|
Maltese,
|
||||||
|
Mongolian,
|
||||||
|
Nepali,
|
||||||
|
Norwegian bokmål,
|
||||||
|
Norwegian nynorsk,
|
||||||
|
Persian,
|
||||||
|
Polish,
|
||||||
|
Punjabi,
|
||||||
|
Romanian,
|
||||||
|
Russian,
|
||||||
|
Sindhi,
|
||||||
|
Slovene (partial),
|
||||||
|
Spanish,
|
||||||
|
Swahili (fragments),
|
||||||
|
Swedish,
|
||||||
|
Thai,
|
||||||
|
Turkish (fragments),
|
||||||
|
and
|
||||||
|
Urdu.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
Adding a language to the resource library takes 3 to 9
|
||||||
|
months - contributions
|
||||||
|
are welcome! You can start with the <a href="lib/doc/rgl-tutorial/index.html">resource grammarian's tutorial</a>.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
</div><!-- .col-6 -->
|
||||||
|
|
||||||
|
</div><!-- .row -->
|
||||||
|
|
||||||
|
</div><!-- .container -->
|
||||||
|
|
||||||
|
<footer class="bg-light mt-5 py-4">
|
||||||
|
<div class="container mb-3">
|
||||||
|
<div class="text-center text-muted">
|
||||||
|
<img style="height:50px; filter: opacity(.5) grayscale(1);" class="mb-3" src="doc/Logos/gf0.svg" alt="GF Logo"><br>
|
||||||
|
Grammatical Framework is free and open source,<br>
|
||||||
|
with some support from <a href="https://www.digitalgrammars.com/">Digital Grammars AB</a>.
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
|
</footer>
|
||||||
|
|
||||||
<script type="text/javascript">
|
<script type="text/javascript">
|
||||||
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
|
var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
|
||||||
@@ -423,5 +395,6 @@ try {
|
|||||||
var pageTracker = _gat._getTracker("UA-7811807-3");
|
var pageTracker = _gat._getTracker("UA-7811807-3");
|
||||||
pageTracker._trackPageview();
|
pageTracker._trackPageview();
|
||||||
} catch(err) {}</script>
|
} catch(err) {}</script>
|
||||||
</BODY>
|
|
||||||
</HTML>
|
</body>
|
||||||
|
</html>
|
||||||
|
|||||||
@@ -19,7 +19,9 @@ module GF(
|
|||||||
module GF.Grammar.Printer,
|
module GF.Grammar.Printer,
|
||||||
module GF.Infra.Ident,
|
module GF.Infra.Ident,
|
||||||
-- ** Binary serialisation
|
-- ** Binary serialisation
|
||||||
module GF.Grammar.Binary
|
module GF.Grammar.Binary,
|
||||||
|
-- * Canonical GF
|
||||||
|
module GF.Compile.GrammarToCanonical
|
||||||
) where
|
) where
|
||||||
import GF.Main
|
import GF.Main
|
||||||
import GF.Compiler
|
import GF.Compiler
|
||||||
@@ -36,3 +38,5 @@ import GF.Grammar.Macros
|
|||||||
import GF.Grammar.Printer
|
import GF.Grammar.Printer
|
||||||
import GF.Infra.Ident
|
import GF.Infra.Ident
|
||||||
import GF.Grammar.Binary
|
import GF.Grammar.Binary
|
||||||
|
|
||||||
|
import GF.Compile.GrammarToCanonical
|
||||||
|
|||||||
@@ -147,11 +147,17 @@ checkCompleteGrammar opts cwd gr (am,abs) (cm,cnc) = checkInModule cwd cnc NoLoc
|
|||||||
return $ updateTree (c,CncFun (Just linty) d mn mf) js
|
return $ updateTree (c,CncFun (Just linty) d mn mf) js
|
||||||
_ -> do checkWarn ("function" <+> c <+> "is not in abstract")
|
_ -> do checkWarn ("function" <+> c <+> "is not in abstract")
|
||||||
return js
|
return js
|
||||||
CncCat _ _ _ _ _ -> case lookupOrigInfo gr (am,c) of
|
CncCat {} ->
|
||||||
Ok _ -> return $ updateTree i js
|
case lookupOrigInfo gr (am,c) of
|
||||||
_ -> do checkWarn ("category" <+> c <+> "is not in abstract")
|
Ok (_,AbsCat _) -> return $ updateTree i js
|
||||||
return js
|
{- -- This might be too pedantic:
|
||||||
_ -> return $ updateTree i js
|
Ok (_,AbsFun {}) ->
|
||||||
|
checkError ("lincat:"<+>c<+>"is a fun, not a cat")
|
||||||
|
-}
|
||||||
|
_ -> do checkWarn ("category" <+> c <+> "is not in abstract")
|
||||||
|
return js
|
||||||
|
|
||||||
|
_ -> return $ updateTree i js
|
||||||
|
|
||||||
|
|
||||||
-- | General Principle: only Just-values are checked.
|
-- | General Principle: only Just-values are checked.
|
||||||
|
|||||||
@@ -1,365 +1,351 @@
|
|||||||
-- | Translate concrete syntax to Haskell
|
-- | Translate concrete syntax to Haskell
|
||||||
module GF.Compile.ConcreteToHaskell(concretes2haskell,concrete2haskell) where
|
module GF.Compile.ConcreteToHaskell(concretes2haskell,concrete2haskell) where
|
||||||
import Data.List(sort,sortBy)
|
import Data.List(isPrefixOf,sort,sortOn)
|
||||||
import Data.Function(on)
|
|
||||||
import qualified Data.Map as M
|
import qualified Data.Map as M
|
||||||
import qualified Data.Set as S
|
import qualified Data.Set as S
|
||||||
import GF.Data.ErrM
|
|
||||||
import GF.Data.Utilities(mapSnd)
|
|
||||||
import GF.Text.Pretty
|
import GF.Text.Pretty
|
||||||
import GF.Grammar.Grammar
|
--import GF.Grammar.Predef(cPredef,cInts)
|
||||||
import GF.Grammar.Lookup(lookupFunType,lookupOrigInfo,allOrigInfos)--,allParamValues
|
--import GF.Compile.Compute.Predef(predef)
|
||||||
import GF.Grammar.Macros(typeForm,collectOp,collectPattOp,mkAbs,mkApp)
|
--import GF.Compile.Compute.Value(Predefined(..))
|
||||||
import GF.Grammar.Lockfield(isLockLabel)
|
import GF.Infra.Ident(Ident,identS,identW,prefixIdent)
|
||||||
import GF.Grammar.Predef(cPredef,cInts)
|
|
||||||
import GF.Compile.Compute.Predef(predef)
|
|
||||||
import GF.Compile.Compute.Value(Predefined(..))
|
|
||||||
import GF.Infra.Ident(Ident,identS,prefixIdent) --,moduleNameS
|
|
||||||
import GF.Infra.Option
|
import GF.Infra.Option
|
||||||
import GF.Compile.Compute.ConcreteNew(normalForm,resourceValues)
|
import GF.Haskell as H
|
||||||
import GF.Haskell
|
import GF.Grammar.Canonical as C
|
||||||
import Debug.Trace
|
import GF.Compile.GrammarToCanonical
|
||||||
|
import Debug.Trace(trace)
|
||||||
|
|
||||||
-- | Generate Haskell code for the all concrete syntaxes associated with
|
-- | Generate Haskell code for the all concrete syntaxes associated with
|
||||||
-- the named abstract syntax in given the grammar.
|
-- the named abstract syntax in given the grammar.
|
||||||
concretes2haskell opts absname gr =
|
concretes2haskell opts absname gr =
|
||||||
[(cncname,concrete2haskell opts gr cenv absname cnc cncmod)
|
[(filename,render80 $ concrete2haskell opts abstr cncmod)
|
||||||
| let cenv = resourceValues opts gr,
|
| let Grammar abstr cncs = grammar2canonical opts absname gr,
|
||||||
cnc<-allConcretes gr absname,
|
cncmod<-cncs,
|
||||||
let cncname = render cnc ++ ".hs" :: FilePath
|
let ModId name = concName cncmod
|
||||||
Ok cncmod = lookupModule gr cnc
|
filename = name ++ ".hs" :: FilePath
|
||||||
]
|
]
|
||||||
|
|
||||||
-- | Generate Haskell code for the given concrete module.
|
-- | Generate Haskell code for the given concrete module.
|
||||||
-- The only options that make a difference are
|
-- The only options that make a difference are
|
||||||
-- @-haskell=noprefix@ and @-haskell=variants@.
|
-- @-haskell=noprefix@ and @-haskell=variants@.
|
||||||
concrete2haskell opts gr cenv absname cnc modinfo =
|
concrete2haskell opts
|
||||||
renderStyle style{lineLength=80,ribbonsPerLine=1} $
|
abstr@(Abstract _ _ cats funs)
|
||||||
haskPreamble va absname cnc $$ vcat (
|
modinfo@(Concrete cnc absname _ ps lcs lns) =
|
||||||
nl:Comment "--- Parameter types ---":
|
haskPreamble absname cnc $$
|
||||||
neededParamTypes S.empty (params defs) ++
|
vcat (
|
||||||
nl:Comment "--- Type signatures for linearization functions ---":
|
nl:Comment "--- Parameter types ---":
|
||||||
map signature (S.toList allcats)++
|
map paramDef ps ++
|
||||||
nl:Comment "--- Linearization functions for empty categories ---":
|
nl:Comment "--- Type signatures for linearization functions ---":
|
||||||
emptydefs ++
|
map signature cats ++
|
||||||
nl:Comment "--- Linearization types and linearization functions ---":
|
nl:Comment "--- Linearization functions for empty categories ---":
|
||||||
map ppDef defs ++
|
emptydefs ++
|
||||||
nl:Comment "--- Type classes for projection functions ---":
|
nl:Comment "--- Linearization types ---":
|
||||||
map labelClass (S.toList labels) ++
|
map lincatDef lcs ++
|
||||||
nl:Comment "--- Record types ---":
|
nl:Comment "--- Linearization functions ---":
|
||||||
concatMap recordType recs)
|
lindefs ++
|
||||||
|
nl:Comment "--- Type classes for projection functions ---":
|
||||||
|
map labelClass (S.toList labels) ++
|
||||||
|
nl:Comment "--- Record types ---":
|
||||||
|
concatMap recordType recs)
|
||||||
where
|
where
|
||||||
nl = Comment ""
|
nl = Comment ""
|
||||||
|
recs = S.toList (S.difference (records (lcs,lns)) common_records)
|
||||||
|
|
||||||
labels = S.difference (S.unions (map S.fromList recs)) common_labels
|
labels = S.difference (S.unions (map S.fromList recs)) common_labels
|
||||||
recs = S.toList (S.difference (records rhss) common_records)
|
|
||||||
common_records = S.fromList [[label_s]]
|
common_records = S.fromList [[label_s]]
|
||||||
common_labels = S.fromList [label_s]
|
common_labels = S.fromList [label_s]
|
||||||
label_s = ident2label (identS "s")
|
label_s = LabelId "s"
|
||||||
|
|
||||||
rhss = map (either snd (snd.snd)) defs
|
signature (CatDef c _) = TypeSig lf (Fun abs (pure lin))
|
||||||
defs = sortBy (compare `on` either (const Nothing) (Just . fst)) .
|
|
||||||
concatMap (toHaskell gId gr absname cenv) .
|
|
||||||
M.toList $
|
|
||||||
jments modinfo
|
|
||||||
|
|
||||||
-- signature c = "lin"<>c<+>"::"<+>"A."<>gId c<+>"->"<+>"Lin"<>c
|
|
||||||
-- signature c = "--lin"<>c<+>":: (Applicative f,Monad f) =>"<+>"A."<>gId c<+>"->"<+>"f Lin"<>c
|
|
||||||
signature c = TypeSig lf (Fun abs (pure lin))
|
|
||||||
where
|
where
|
||||||
abs = tcon0 (prefixIdent "A." (gId c))
|
abs = tcon0 (prefixIdent "A." (gId c))
|
||||||
lin = tcon0 lc
|
lin = tcon0 lc
|
||||||
lf = prefixIdent "lin" c
|
lf = linfunName c
|
||||||
lc = prefixIdent "Lin" c
|
lc = lincatName c
|
||||||
|
|
||||||
emptydefs = map emptydef (S.toList emptyCats)
|
emptydefs = map emptydef (S.toList emptyCats)
|
||||||
emptydef c = Eqn (prefixIdent "lin" c,[WildP]) (Const "undefined")
|
emptydef c = Eqn (linfunName c,[WildP]) (Const "undefined")
|
||||||
|
|
||||||
emptyCats = allcats `S.difference` cats
|
emptyCats = allcats `S.difference` linfuncats
|
||||||
cats = S.fromList [c|Right (c,_)<-defs]
|
where
|
||||||
allcats = S.fromList [c|((_,c),AbsCat (Just _))<-allOrigInfos gr absname]
|
--funcats = S.fromList [c | FunDef f (C.Type _ (TypeApp c _))<-funs]
|
||||||
|
allcats = S.fromList [c | CatDef c _<-cats]
|
||||||
|
|
||||||
params = S.toList . S.unions . map params1
|
gId :: ToIdent i => i -> Ident
|
||||||
params1 (Left (_,rhs)) = paramTypes gr rhs
|
gId = (if haskellOption opts HaskellNoPrefix then id else prefixIdent "G")
|
||||||
params1 (Right (_,(_,rhs))) = tableTypes gr [rhs]
|
. toIdent
|
||||||
|
|
||||||
ppDef (Left (lhs,rhs)) = lhs (convType va gId rhs)
|
|
||||||
ppDef (Right (_,(lhs,rhs))) = lhs (convert va gId gr rhs)
|
|
||||||
|
|
||||||
gId :: Ident -> Ident
|
|
||||||
gId = if haskellOption opts HaskellNoPrefix then id else prefixIdent "G"
|
|
||||||
va = haskellOption opts HaskellVariants
|
va = haskellOption opts HaskellVariants
|
||||||
pure = if va then ListT else id
|
pure = if va then ListT else id
|
||||||
|
|
||||||
neededParamTypes have [] = []
|
haskPreamble :: ModId -> ModId -> Doc
|
||||||
neededParamTypes have (q:qs) =
|
haskPreamble absname cncname =
|
||||||
if q `S.member` have
|
"{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, LambdaCase #-}" $$
|
||||||
then neededParamTypes have qs
|
"module" <+> cncname <+> "where" $$
|
||||||
else let ((got,need),def) = paramType va gId gr q
|
"import Prelude hiding (Ordering(..))" $$
|
||||||
in def++neededParamTypes (S.union got have) (S.toList need++qs)
|
"import Control.Applicative((<$>),(<*>))" $$
|
||||||
|
"import PGF.Haskell" $$
|
||||||
haskPreamble :: Bool -> ModuleName -> ModuleName -> Doc
|
"import qualified" <+> absname <+> "as A" $$
|
||||||
haskPreamble va absname cncname =
|
"" $$
|
||||||
"{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, LambdaCase #-}" $$
|
"--- Standard definitions ---" $$
|
||||||
"module" <+> cncname <+> "where" $$
|
"linString (A.GString s) ="<+>pure "R_s [TK s]" $$
|
||||||
"import Prelude hiding (Ordering(..))" $$
|
"linInt (A.GInt i) ="<+>pure "R_s [TK (show i)]" $$
|
||||||
"import Control.Applicative((<$>),(<*>))" $$
|
"linFloat (A.GFloat x) ="<+>pure "R_s [TK (show x)]" $$
|
||||||
"import PGF.Haskell" $$
|
"" $$
|
||||||
"import qualified" <+> absname <+> "as A" $$
|
"----------------------------------------------------" $$
|
||||||
"" $$
|
"-- Automatic translation from GF to Haskell follows" $$
|
||||||
"--- Standard definitions ---" $$
|
"----------------------------------------------------"
|
||||||
"linString (A.GString s) ="<+>pure "R_s [TK s]" $$
|
|
||||||
"linInt (A.GInt i) ="<+>pure "R_s [TK (show i)]" $$
|
|
||||||
"linFloat (A.GFloat x) ="<+>pure "R_s [TK (show x)]" $$
|
|
||||||
"" $$
|
|
||||||
"----------------------------------------------------" $$
|
|
||||||
"-- Automatic translation from GF to Haskell follows" $$
|
|
||||||
"----------------------------------------------------"
|
|
||||||
where
|
|
||||||
pure = if va then brackets else pp
|
|
||||||
|
|
||||||
toHaskell gId gr absname cenv (name,jment) =
|
|
||||||
case jment of
|
|
||||||
CncCat (Just (L loc typ)) _ _ pprn _ ->
|
|
||||||
[Left (tsyn0 (prefixIdent "Lin" name),nf loc typ)]
|
|
||||||
CncFun (Just r@(cat,ctx,lincat)) (Just (L loc def)) pprn _ ->
|
|
||||||
-- trace (render (name<+>hcat[parens (x<>"::"<>t)|(_,x,t)<-ctx]<+>"::"<+>cat)) $
|
|
||||||
[Right (cat,(Eqn (prefixIdent "lin" cat,lhs),coerce [] lincat rhs))]
|
|
||||||
where
|
where
|
||||||
Ok abstype = lookupFunType gr absname name
|
pure = if va then brackets else pp
|
||||||
(absctx,_abscat,_absargs) = typeForm abstype
|
|
||||||
|
|
||||||
e' = unAbs (length params) $
|
paramDef pd =
|
||||||
nf loc (mkAbs params (mkApp def (map Vr args)))
|
case pd of
|
||||||
params = [(b,prefixIdent "g" x)|(b,x,_)<-ctx]
|
ParamAliasDef p t -> H.Type (conap0 (gId p)) (convLinType t)
|
||||||
args = map snd params
|
ParamDef p pvs -> Data (conap0 (gId p)) (map paramCon pvs) derive
|
||||||
abs_args = map (prefixIdent "abs_") args
|
where
|
||||||
lhs = [ConP (aId name) (map VarP abs_args)]
|
paramCon (Param c cs) = ConAp (gId c) (map (tcon0.gId) cs)
|
||||||
rhs = foldr letlin e' (zip args absctx)
|
derive = ["Eq","Ord","Show"]
|
||||||
letlin (a,(_,_,at)) =
|
|
||||||
Let (a,(Just (con ("Lin"++render at)),(App (con ("lin"++render at)) (con ("abs_"++render a)))))
|
|
||||||
AnyInd _ m -> case lookupOrigInfo gr (m,name) of
|
|
||||||
Ok (m,jment) -> toHaskell gId gr absname cenv (name,jment)
|
|
||||||
_ -> []
|
|
||||||
_ -> []
|
|
||||||
where
|
|
||||||
nf loc = normalForm cenv (L loc name)
|
|
||||||
aId n = prefixIdent "A." (gId n)
|
|
||||||
|
|
||||||
unAbs 0 t = t
|
convLinType = ppT
|
||||||
unAbs n (Abs _ _ t) = unAbs (n-1) t
|
where
|
||||||
unAbs _ t = t
|
ppT t =
|
||||||
|
case t of
|
||||||
|
FloatType -> tcon0 (identS "Float")
|
||||||
|
IntType -> tcon0 (identS "Int")
|
||||||
|
ParamType (ParamTypeId p) -> tcon0 (gId p)
|
||||||
|
RecordType rs -> tcon (rcon' ls) (map ppT ts)
|
||||||
|
where (ls,ts) = unzip $ sortOn fst [(l,t)|RecordRow l t<-rs]
|
||||||
|
StrType -> tcon0 (identS "Str")
|
||||||
|
TableType pt lt -> Fun (ppT pt) (ppT lt)
|
||||||
|
-- TupleType lts ->
|
||||||
|
|
||||||
|
lincatDef (LincatDef c t) = tsyn0 (lincatName c) (convLinType t)
|
||||||
|
|
||||||
|
linfuncats = S.fromList linfuncatl
|
||||||
|
(linfuncatl,lindefs) = unzip (linDefs lns)
|
||||||
|
|
||||||
|
linDefs = map eqn . sortOn fst . map linDef
|
||||||
|
where eqn (cat,(f,(ps,rhs))) = (cat,Eqn (f,ps) rhs)
|
||||||
|
|
||||||
|
linDef (LinDef f xs rhs0) =
|
||||||
|
(cat,(linfunName cat,(lhs,rhs)))
|
||||||
|
where
|
||||||
|
lhs = [ConP (aId f) (map VarP abs_args)]
|
||||||
|
aId f = prefixIdent "A." (gId f)
|
||||||
|
|
||||||
|
[lincat] = [lincat | LincatDef c lincat<-lcs,c==cat]
|
||||||
|
[C.Type absctx (TypeApp cat _)] = [t | FunDef f' t<-funs, f'==f]
|
||||||
|
|
||||||
|
abs_args = map abs_arg args
|
||||||
|
abs_arg = prefixIdent "abs_"
|
||||||
|
args = map (prefixIdent "g" . toIdent) xs
|
||||||
|
|
||||||
|
rhs = lets (zipWith letlin args absctx)
|
||||||
|
(convert vs (coerce env lincat rhs0))
|
||||||
|
where
|
||||||
|
vs = [(VarValueId (Unqual x),a)|(VarId x,a)<-zip xs args]
|
||||||
|
env= [(VarValueId (Unqual x),lc)|(VarId x,lc)<-zip xs (map arglincat absctx)]
|
||||||
|
|
||||||
|
letlin a (TypeBinding _ (C.Type _ (TypeApp acat _))) =
|
||||||
|
(a,Ap (Var (linfunName acat)) (Var (abs_arg a)))
|
||||||
|
|
||||||
|
arglincat (TypeBinding _ (C.Type _ (TypeApp acat _))) = lincat
|
||||||
|
where
|
||||||
|
[lincat] = [lincat | LincatDef c lincat<-lcs,c==acat]
|
||||||
|
|
||||||
|
convert = convert' va
|
||||||
|
|
||||||
|
convert' va vs = ppT
|
||||||
|
where
|
||||||
|
ppT0 = convert' False vs
|
||||||
|
ppTv vs' = convert' va vs'
|
||||||
|
|
||||||
|
pure = if va then single else id
|
||||||
|
|
||||||
|
ppT t =
|
||||||
|
case t of
|
||||||
|
TableValue ty cs -> pure (table cs)
|
||||||
|
Selection t p -> select (ppT t) (ppT p)
|
||||||
|
ConcatValue t1 t2 -> concat (ppT t1) (ppT t2)
|
||||||
|
RecordValue r -> aps (rcon ls) (map ppT ts)
|
||||||
|
where (ls,ts) = unzip $ sortOn fst [(l,t)|RecordRow l t<-r]
|
||||||
|
PredefValue p -> single (Var (toIdent p)) -- hmm
|
||||||
|
Projection t l -> ap (proj l) (ppT t)
|
||||||
|
VariantValue [] -> empty
|
||||||
|
VariantValue ts@(_:_) -> variants ts
|
||||||
|
VarValue x -> maybe (Var (gId x)) (pure . Var) $ lookup x vs
|
||||||
|
PreValue vs t' -> pure (alts t' vs)
|
||||||
|
ParamConstant (Param c vs) -> aps (Var (pId c)) (map ppT vs)
|
||||||
|
ErrorValue s -> ap (Const "error") (Const (show s)) -- !!
|
||||||
|
LiteralValue l -> ppL l
|
||||||
|
_ -> error ("convert "++show t)
|
||||||
|
|
||||||
|
ppL l =
|
||||||
|
case l of
|
||||||
|
FloatConstant x -> pure (lit x)
|
||||||
|
IntConstant n -> pure (lit n)
|
||||||
|
StrConstant s -> pure (token s)
|
||||||
|
|
||||||
|
pId p@(ParamId s) =
|
||||||
|
if "to_R_" `isPrefixOf` unqual s then toIdent p else gId p -- !! a hack
|
||||||
|
|
||||||
|
table cs =
|
||||||
|
if all (null.patVars) ps
|
||||||
|
then lets ds (LambdaCase [(ppP p,t')|(p,t')<-zip ps ts'])
|
||||||
|
else LambdaCase (map ppCase cs)
|
||||||
|
where
|
||||||
|
(ds,ts') = dedup ts
|
||||||
|
(ps,ts) = unzip [(p,t)|TableRow p t<-cs]
|
||||||
|
ppCase (TableRow p t) = (ppP p,ppTv (patVars p++vs) t)
|
||||||
|
{-
|
||||||
|
ppPredef n =
|
||||||
|
case predef n of
|
||||||
|
Ok BIND -> single (c "BIND")
|
||||||
|
Ok SOFT_BIND -> single (c "SOFT_BIND")
|
||||||
|
Ok SOFT_SPACE -> single (c "SOFT_SPACE")
|
||||||
|
Ok CAPIT -> single (c "CAPIT")
|
||||||
|
Ok ALL_CAPIT -> single (c "ALL_CAPIT")
|
||||||
|
_ -> Var n
|
||||||
|
-}
|
||||||
|
ppP p =
|
||||||
|
case p of
|
||||||
|
ParamPattern (Param c ps) -> ConP (gId c) (map ppP ps)
|
||||||
|
RecordPattern r -> ConP (rcon' ls) (map ppP ps)
|
||||||
|
where (ls,ps) = unzip $ sortOn fst [(l,p)|RecordRow l p<-r]
|
||||||
|
WildPattern -> WildP
|
||||||
|
|
||||||
|
token s = single (c "TK" `Ap` lit s)
|
||||||
|
|
||||||
|
alts t' vs = single (c "TP" `Ap` List (map alt vs) `Ap` ppT0 t')
|
||||||
|
where
|
||||||
|
alt (s,t) = Pair (List (pre s)) (ppT0 t)
|
||||||
|
pre s = map lit s
|
||||||
|
|
||||||
|
c = Const
|
||||||
|
lit s = c (show s) -- hmm
|
||||||
|
concat = if va then concat' else plusplus
|
||||||
|
where
|
||||||
|
concat' (List [List ts1]) (List [List ts2]) = List [List (ts1++ts2)]
|
||||||
|
concat' t1 t2 = Op t1 "+++" t2
|
||||||
|
|
||||||
|
pure' = single -- forcing the list monad
|
||||||
|
|
||||||
|
select = if va then select' else Ap
|
||||||
|
select' (List [t]) (List [p]) = Op t "!" p
|
||||||
|
select' (List [t]) p = Op t "!$" p
|
||||||
|
select' t p = Op t "!*" p
|
||||||
|
|
||||||
|
ap = if va then ap' else Ap
|
||||||
|
where
|
||||||
|
ap' (List [f]) x = fmap f x
|
||||||
|
ap' f x = Op f "<*>" x
|
||||||
|
fmap f (List [x]) = pure' (Ap f x)
|
||||||
|
fmap f x = Op f "<$>" x
|
||||||
|
|
||||||
|
-- join = if va then join' else id
|
||||||
|
join' (List [x]) = x
|
||||||
|
join' x = c "concat" `Ap` x
|
||||||
|
|
||||||
|
empty = if va then List [] else c "error" `Ap` c (show "empty variant")
|
||||||
|
variants = if va then \ ts -> join' (List (map ppT ts))
|
||||||
|
else \ (t:_) -> ppT t
|
||||||
|
|
||||||
|
aps f [] = f
|
||||||
|
aps f (a:as) = aps (ap f a) as
|
||||||
|
|
||||||
|
dedup ts =
|
||||||
|
if M.null dups
|
||||||
|
then ([],map ppT ts)
|
||||||
|
else ([(ev i,ppT t)|(i,t)<-defs],zipWith entry ts is)
|
||||||
|
where
|
||||||
|
entry t i = maybe (ppT t) (Var . ev) (M.lookup i dups)
|
||||||
|
ev i = identS ("e'"++show i)
|
||||||
|
|
||||||
|
defs = [(i1,t)|(t,i1:_:_)<-ms]
|
||||||
|
dups = M.fromList [(i2,i1)|(_,i1:is@(_:_))<-ms,i2<-i1:is]
|
||||||
|
ms = M.toList m
|
||||||
|
m = fmap sort (M.fromListWith (++) (zip ts [[i]|i<-is]))
|
||||||
|
is = [0..]::[Int]
|
||||||
|
|
||||||
|
|
||||||
con = Cn . identS
|
--con = Cn . identS
|
||||||
|
|
||||||
tableTypes gr ts = S.unions (map tabtys ts)
|
class Records t where
|
||||||
where
|
records :: t -> S.Set [LabelId]
|
||||||
tabtys t =
|
|
||||||
case t of
|
|
||||||
V t cc -> S.union (paramTypes gr t) (tableTypes gr cc)
|
|
||||||
T (TTyped t) cs -> S.union (paramTypes gr t) (tableTypes gr (map snd cs))
|
|
||||||
_ -> collectOp tabtys t
|
|
||||||
|
|
||||||
paramTypes gr t =
|
instance Records t => Records [t] where
|
||||||
case t of
|
records = S.unions . map records
|
||||||
RecType fs -> S.unions (map (paramTypes gr.snd) fs)
|
|
||||||
Table t1 t2 -> S.union (paramTypes gr t1) (paramTypes gr t2)
|
|
||||||
App tf ta -> S.union (paramTypes gr tf) (paramTypes gr ta)
|
|
||||||
Sort _ -> S.empty
|
|
||||||
EInt _ -> S.empty
|
|
||||||
Q q -> lookup q
|
|
||||||
QC q -> lookup q
|
|
||||||
FV ts -> S.unions (map (paramTypes gr) ts)
|
|
||||||
_ -> ignore
|
|
||||||
where
|
|
||||||
lookup q = case lookupOrigInfo gr q of
|
|
||||||
Ok (_,ResOper _ (Just (L _ t))) ->
|
|
||||||
S.insert q (paramTypes gr t)
|
|
||||||
Ok (_,ResParam {}) -> S.singleton q
|
|
||||||
_ -> ignore
|
|
||||||
|
|
||||||
ignore = trace ("Ignore: "++show t) S.empty
|
instance (Records t1,Records t2) => Records (t1,t2) where
|
||||||
|
records (t1,t2) = S.union (records t1) (records t2)
|
||||||
|
|
||||||
|
instance Records LincatDef where
|
||||||
records ts = S.unions (map recs ts)
|
records (LincatDef _ lt) = records lt
|
||||||
where
|
|
||||||
recs t =
|
instance Records LinDef where
|
||||||
case t of
|
records (LinDef _ _ lv) = records lv
|
||||||
R r -> S.insert (labels r) (records (map (snd.snd) r))
|
|
||||||
RecType r -> S.insert (labels r) (records (map snd r))
|
instance Records LinType where
|
||||||
_ -> collectOp recs t
|
records t =
|
||||||
|
case t of
|
||||||
labels = sort . filter (not . isLockLabel) . map fst
|
RecordType r -> rowRecords r
|
||||||
|
TableType pt lt -> records (pt,lt)
|
||||||
|
TupleType ts -> records ts
|
||||||
|
_ -> S.empty
|
||||||
|
|
||||||
|
rowRecords r = S.insert (sort ls) (records ts)
|
||||||
|
where (ls,ts) = unzip [(l,t)|RecordRow l t<-r]
|
||||||
|
|
||||||
|
instance Records LinValue where
|
||||||
|
records v =
|
||||||
|
case v of
|
||||||
|
ConcatValue v1 v2 -> records (v1,v2)
|
||||||
|
ParamConstant (Param c vs) -> records vs
|
||||||
|
RecordValue r -> rowRecords r
|
||||||
|
TableValue t r -> records (t,r)
|
||||||
|
TupleValue vs -> records vs
|
||||||
|
VariantValue vs -> records vs
|
||||||
|
PreValue alts d -> records (map snd alts,d)
|
||||||
|
Projection v l -> records v
|
||||||
|
Selection v1 v2 -> records (v1,v2)
|
||||||
|
_ -> S.empty
|
||||||
|
|
||||||
|
instance Records rhs => Records (TableRow rhs) where
|
||||||
|
records (TableRow _ v) = records v
|
||||||
|
|
||||||
|
|
||||||
|
-- | Record subtyping is converted into explicit coercions in Haskell
|
||||||
coerce env ty t =
|
coerce env ty t =
|
||||||
case (ty,t) of
|
case (ty,t) of
|
||||||
(_,Let d t) -> Let d (coerce (extend env d) ty t)
|
(_,VariantValue ts) -> VariantValue (map (coerce env ty) ts)
|
||||||
(_,FV ts) -> FV (map (coerce env ty) ts)
|
(TableType ti tv,TableValue _ cs) ->
|
||||||
(Table ti tv,V _ ts) -> V ti (map (coerce env tv) ts)
|
TableValue ti [TableRow p (coerce env tv t)|TableRow p t<-cs]
|
||||||
(Table ti tv,T (TTyped _) cs) -> T (TTyped ti) (mapSnd (coerce env tv) cs)
|
(RecordType rt,RecordValue r) ->
|
||||||
(RecType rt,R r) ->
|
RecordValue [RecordRow l (coerce env ft f) |
|
||||||
R [(l,(Just ft,coerce env ft f))|(l,(_,f))<-r,Just ft<-[lookup l rt]]
|
RecordRow l f<-r,ft<-[ft|RecordRow l' ft<-rt,l'==l]]
|
||||||
(RecType rt,Vr x)->
|
(RecordType rt,VarValue x)->
|
||||||
case lookup x env of
|
case lookup x env of
|
||||||
Just ty' | ty'/=ty -> -- better to compare to normal form of ty'
|
Just ty' | ty'/=ty -> -- better to compare to normal form of ty'
|
||||||
--trace ("coerce "++render ty'++" to "++render ty) $
|
--trace ("coerce "++render ty'++" to "++render ty) $
|
||||||
App (to_rcon (map fst rt)) t
|
app (to_rcon rt) [t]
|
||||||
_ -> trace ("no coerce to "++render ty) t
|
| otherwise -> t -- types match, no coercion needed
|
||||||
|
_ -> trace (render ("missing type to coerce"<+>x<+>"to"<+>render ty
|
||||||
|
$$ "in" <+> map fst env))
|
||||||
|
t
|
||||||
_ -> t
|
_ -> t
|
||||||
where
|
where
|
||||||
extend env (x,(Just ty,rhs)) = (x,ty):env
|
app f ts = ParamConstant (Param f ts) -- !! a hack
|
||||||
extend env _ = env
|
to_rcon = ParamId . Unqual . to_rcon' . labels
|
||||||
|
|
||||||
convert va gId gr = convert' va gId [] gr
|
patVars p = []
|
||||||
|
|
||||||
convert' va gId vs gr = ppT
|
labels r = [l|RecordRow l _<-r]
|
||||||
where
|
|
||||||
ppT0 = convert' False gId vs gr
|
|
||||||
ppTv vs' = convert' va gId vs' gr
|
|
||||||
|
|
||||||
ppT t =
|
proj = Var . identS . proj'
|
||||||
case t of
|
proj' (LabelId l) = "proj_"++l
|
||||||
-- Only for 'let' inserted on the top-level by this converter:
|
rcon = Var . rcon'
|
||||||
Let (x,(_,xt)) t -> let1 x (ppT0 xt) (ppT t)
|
|
||||||
-- Abs b x t -> ...
|
|
||||||
V ty ts -> pure (c "table" `Ap` dedup ts)
|
|
||||||
T (TTyped ty) cs -> pure (LambdaCase (map ppCase cs))
|
|
||||||
S t p -> select (ppT t) (ppT p)
|
|
||||||
C t1 t2 -> concat (ppT t1) (ppT t2)
|
|
||||||
App f a -> ap (ppT f) (ppT a)
|
|
||||||
R r -> aps (ppT (rcon (map fst r))) (fields r)
|
|
||||||
P t l -> ap (ppT (proj l)) (ppT t)
|
|
||||||
FV [] -> empty
|
|
||||||
Vr x -> if x `elem` vs then pure (Var x) else Var x
|
|
||||||
Cn x -> pure (Var x)
|
|
||||||
Con c -> pure (Var (gId c))
|
|
||||||
Sort k -> pure (Var k)
|
|
||||||
EInt n -> pure (lit n)
|
|
||||||
Q (m,n) -> if m==cPredef then pure (ppPredef n) else Var (qual m n)
|
|
||||||
QC (m,n) -> pure (Var (gId (qual m n)))
|
|
||||||
K s -> pure (token s)
|
|
||||||
Empty -> pure (List [])
|
|
||||||
FV ts@(_:_) -> variants ts
|
|
||||||
Alts t' vs -> pure (alts t' vs)
|
|
||||||
|
|
||||||
ppCase (p,t) = (ppP p,ppTv (patVars p++vs) t)
|
|
||||||
|
|
||||||
ppPredef n =
|
|
||||||
case predef n of
|
|
||||||
Ok BIND -> single (c "BIND")
|
|
||||||
Ok SOFT_BIND -> single (c "SOFT_BIND")
|
|
||||||
Ok SOFT_SPACE -> single (c "SOFT_SPACE")
|
|
||||||
Ok CAPIT -> single (c "CAPIT")
|
|
||||||
Ok ALL_CAPIT -> single (c "ALL_CAPIT")
|
|
||||||
_ -> Var n
|
|
||||||
|
|
||||||
ppP p =
|
|
||||||
case p of
|
|
||||||
PC c ps -> ConP (gId c) (map ppP ps)
|
|
||||||
PP (_,c) ps -> ConP (gId c) (map ppP ps)
|
|
||||||
PR r -> ConP (rcon' (map fst r)) (map (ppP.snd) (filter (not.isLockLabel.fst) r))
|
|
||||||
PW -> WildP
|
|
||||||
PV x -> VarP x
|
|
||||||
PString s -> Lit (show s) -- !!
|
|
||||||
PInt i -> Lit (show i)
|
|
||||||
PFloat x -> Lit (show x)
|
|
||||||
PT _ p -> ppP p
|
|
||||||
PAs x p -> AsP x (ppP p)
|
|
||||||
|
|
||||||
token s = single (c "TK" `Ap` lit s)
|
|
||||||
|
|
||||||
alts t' vs = single (c "TP" `Ap` List (map alt vs) `Ap` ppT0 t')
|
|
||||||
where
|
|
||||||
alt (t,p) = Pair (List (pre p)) (ppT0 t)
|
|
||||||
|
|
||||||
pre (K s) = [lit s]
|
|
||||||
pre (Strs ts) = concatMap pre ts
|
|
||||||
pre (EPatt p) = pat p
|
|
||||||
pre t = error $ "pre "++show t
|
|
||||||
|
|
||||||
pat (PString s) = [lit s]
|
|
||||||
pat (PAlt p1 p2) = pat p1++pat p2
|
|
||||||
pat p = error $ "pat "++show p
|
|
||||||
|
|
||||||
fields = map (ppT.snd.snd) . sort . filter (not.isLockLabel.fst)
|
|
||||||
|
|
||||||
c = Const
|
|
||||||
lit s = c (show s) -- hmm
|
|
||||||
concat = if va then concat' else plusplus
|
|
||||||
where
|
|
||||||
concat' (List [List ts1]) (List [List ts2]) = List [List (ts1++ts2)]
|
|
||||||
concat' t1 t2 = Op t1 "+++" t2
|
|
||||||
pure = if va then single else id
|
|
||||||
pure' = single -- forcing the list monad
|
|
||||||
|
|
||||||
select = if va then select' else Ap
|
|
||||||
select' (List [t]) (List [p]) = Op t "!" p
|
|
||||||
select' (List [t]) p = Op t "!$" p
|
|
||||||
select' t p = Op t "!*" p
|
|
||||||
|
|
||||||
ap = if va then ap' else Ap
|
|
||||||
where
|
|
||||||
ap' (List [f]) x = fmap f x
|
|
||||||
ap' f x = Op f "<*>" x
|
|
||||||
fmap f (List [x]) = pure' (Ap f x)
|
|
||||||
fmap f x = Op f "<$>" x
|
|
||||||
|
|
||||||
-- join = if va then join' else id
|
|
||||||
join' (List [x]) = x
|
|
||||||
join' x = c "concat" `Ap` x
|
|
||||||
|
|
||||||
empty = if va then List [] else c "error" `Ap` c (show "empty variant")
|
|
||||||
variants = if va then \ ts -> join' (List (map ppT ts))
|
|
||||||
else \ (t:_) -> ppT t
|
|
||||||
|
|
||||||
aps f [] = f
|
|
||||||
aps f (a:as) = aps (ap f a) as
|
|
||||||
|
|
||||||
dedup ts =
|
|
||||||
if M.null dups
|
|
||||||
then List (map ppT ts)
|
|
||||||
else Lets [(ev i,ppT t)|(i,t)<-defs] (List (zipWith entry ts is))
|
|
||||||
where
|
|
||||||
entry t i = maybe (ppT t) (Var . ev) (M.lookup i dups)
|
|
||||||
ev i = identS ("e'"++show i)
|
|
||||||
|
|
||||||
defs = [(i1,t)|(t,i1:_:_)<-ms]
|
|
||||||
dups = M.fromList [(i2,i1)|(_,i1:is@(_:_))<-ms,i2<-i1:is]
|
|
||||||
ms = M.toList m
|
|
||||||
m = fmap sort (M.fromListWith (++) (zip ts [[i]|i<-is]))
|
|
||||||
is = [0..]::[Int]
|
|
||||||
|
|
||||||
patVars p =
|
|
||||||
case p of
|
|
||||||
PV x -> [x]
|
|
||||||
PAs x p -> x:patVars p
|
|
||||||
_ -> collectPattOp patVars p
|
|
||||||
|
|
||||||
convType va gId = ppT
|
|
||||||
where
|
|
||||||
ppT t =
|
|
||||||
case t of
|
|
||||||
Table ti tv -> Fun (ppT ti) (if va then ListT (ppT tv) else ppT tv)
|
|
||||||
RecType rt -> tcon (rcon' (map fst rt)) (fields rt)
|
|
||||||
App tf ta -> TAp (ppT tf) (ppT ta)
|
|
||||||
FV [] -> tcon0 (identS "({-empty variant-})")
|
|
||||||
Sort k -> tcon0 k
|
|
||||||
EInt n -> tcon0 (identS ("({-"++show n++"-})")) -- type level numeric literal
|
|
||||||
FV (t:ts) -> ppT t -- !!
|
|
||||||
QC (m,n) -> tcon0 (gId (qual m n))
|
|
||||||
Q (m,n) -> tcon0 (gId (qual m n))
|
|
||||||
_ -> error $ "Missing case in convType for: "++show t
|
|
||||||
|
|
||||||
fields = map (ppT.snd) . sort . filter (not.isLockLabel.fst)
|
|
||||||
|
|
||||||
proj = con . proj'
|
|
||||||
proj' l = "proj_"++render l
|
|
||||||
rcon = con . rcon_name
|
|
||||||
rcon' = identS . rcon_name
|
rcon' = identS . rcon_name
|
||||||
rcon_name ls = "R"++concat (sort ['_':render l|l<-ls,not (isLockLabel l)])
|
rcon_name ls = "R"++concat (sort ['_':l|LabelId l<-ls])
|
||||||
to_rcon = con . to_rcon'
|
|
||||||
to_rcon' = ("to_"++) . rcon_name
|
to_rcon' = ("to_"++) . rcon_name
|
||||||
|
|
||||||
recordType ls =
|
recordType ls =
|
||||||
@@ -400,31 +386,6 @@ labelClass l =
|
|||||||
r = identS "r"
|
r = identS "r"
|
||||||
a = identS "a"
|
a = identS "a"
|
||||||
|
|
||||||
paramType va gId gr q@(_,n) =
|
|
||||||
case lookupOrigInfo gr q of
|
|
||||||
Ok (m,ResParam (Just (L _ ps)) _)
|
|
||||||
{- - | m/=cPredef && m/=moduleNameS "Prelude"-} ->
|
|
||||||
((S.singleton (m,n),argTypes ps),
|
|
||||||
[Data (conap0 name) (map (param m) ps)["Eq","Ord","Show"],
|
|
||||||
Instance [] (TId (identS "EnumAll") `TAp` TId name)
|
|
||||||
[(lhs0 "enumAll",foldr1 plusplus (map (enumParam m) ps))]]
|
|
||||||
)
|
|
||||||
where name = gId (qual m n)
|
|
||||||
Ok (m,ResOper _ (Just (L _ t)))
|
|
||||||
| m==cPredef && n==cInts ->
|
|
||||||
((S.singleton (m,n),S.empty),
|
|
||||||
[Type (ConAp (gId (qual m n)) [identS "n"]) (TId (identS "Int"))])
|
|
||||||
| otherwise ->
|
|
||||||
((S.singleton (m,n),paramTypes gr t),
|
|
||||||
[Type (conap0 (gId (qual m n))) (convType va gId t)])
|
|
||||||
_ -> ((S.empty,S.empty),[])
|
|
||||||
where
|
|
||||||
param m (n,ctx) = ConAp (gId (qual m n)) [convType va gId t|(_,_,t)<-ctx]
|
|
||||||
argTypes = S.unions . map argTypes1
|
|
||||||
argTypes1 (n,ctx) = S.unions [paramTypes gr t|(_,_,t)<-ctx]
|
|
||||||
|
|
||||||
enumParam m (n,ctx) = enumCon (gId (qual m n)) (length ctx)
|
|
||||||
|
|
||||||
enumCon name arity =
|
enumCon name arity =
|
||||||
if arity==0
|
if arity==0
|
||||||
then single (Var name)
|
then single (Var name)
|
||||||
@@ -433,5 +394,23 @@ enumCon name arity =
|
|||||||
ap (List [f]) a = Op f "<$>" a
|
ap (List [f]) a = Op f "<$>" a
|
||||||
ap f a = Op f "<*>" a
|
ap f a = Op f "<*>" a
|
||||||
|
|
||||||
qual :: ModuleName -> Ident -> Ident
|
lincatName,linfunName :: CatId -> Ident
|
||||||
qual m = prefixIdent (render m++"_")
|
lincatName c = prefixIdent "Lin" (toIdent c)
|
||||||
|
linfunName c = prefixIdent "lin" (toIdent c)
|
||||||
|
|
||||||
|
class ToIdent i where toIdent :: i -> Ident
|
||||||
|
|
||||||
|
instance ToIdent ParamId where toIdent (ParamId q) = qIdentS q
|
||||||
|
instance ToIdent PredefId where toIdent (PredefId s) = identS s
|
||||||
|
instance ToIdent CatId where toIdent (CatId s) = identS s
|
||||||
|
instance ToIdent C.FunId where toIdent (FunId s) = identS s
|
||||||
|
instance ToIdent VarValueId where toIdent (VarValueId q) = qIdentS q
|
||||||
|
|
||||||
|
qIdentS = identS . unqual
|
||||||
|
|
||||||
|
unqual (Qual (ModId m) n) = m++"_"++n
|
||||||
|
unqual (Unqual n) = n
|
||||||
|
|
||||||
|
instance ToIdent VarId where
|
||||||
|
toIdent Anonymous = identW
|
||||||
|
toIdent (VarId s) = identS s
|
||||||
|
|||||||
@@ -3,9 +3,11 @@ module GF.Compile.Export where
|
|||||||
import PGF
|
import PGF
|
||||||
import PGF.Internal(ppPGF)
|
import PGF.Internal(ppPGF)
|
||||||
import GF.Compile.PGFtoHaskell
|
import GF.Compile.PGFtoHaskell
|
||||||
|
--import GF.Compile.PGFtoAbstract
|
||||||
import GF.Compile.PGFtoJava
|
import GF.Compile.PGFtoJava
|
||||||
import GF.Compile.PGFtoProlog
|
import GF.Compile.PGFtoProlog
|
||||||
import GF.Compile.PGFtoJS
|
import GF.Compile.PGFtoJS
|
||||||
|
import GF.Compile.PGFtoJSON
|
||||||
import GF.Compile.PGFtoPython
|
import GF.Compile.PGFtoPython
|
||||||
import GF.Infra.Option
|
import GF.Infra.Option
|
||||||
--import GF.Speech.CFG
|
--import GF.Speech.CFG
|
||||||
@@ -34,7 +36,10 @@ exportPGF :: Options
|
|||||||
exportPGF opts fmt pgf =
|
exportPGF opts fmt pgf =
|
||||||
case fmt of
|
case fmt of
|
||||||
FmtPGFPretty -> multi "txt" (render . ppPGF)
|
FmtPGFPretty -> multi "txt" (render . ppPGF)
|
||||||
|
FmtCanonicalGF -> [] -- canon "gf" (render80 . abstract2canonical)
|
||||||
|
FmtCanonicalJson-> []
|
||||||
FmtJavaScript -> multi "js" pgf2js
|
FmtJavaScript -> multi "js" pgf2js
|
||||||
|
FmtJSON -> multi "json" pgf2json
|
||||||
FmtPython -> multi "py" pgf2python
|
FmtPython -> multi "py" pgf2python
|
||||||
FmtHaskell -> multi "hs" (grammar2haskell opts name)
|
FmtHaskell -> multi "hs" (grammar2haskell opts name)
|
||||||
FmtJava -> multi "java" (grammar2java opts name)
|
FmtJava -> multi "java" (grammar2java opts name)
|
||||||
@@ -57,9 +62,12 @@ exportPGF opts fmt pgf =
|
|||||||
multi :: String -> (PGF -> String) -> [(FilePath,String)]
|
multi :: String -> (PGF -> String) -> [(FilePath,String)]
|
||||||
multi ext pr = [(name <.> ext, pr pgf)]
|
multi ext pr = [(name <.> ext, pr pgf)]
|
||||||
|
|
||||||
|
-- canon ext pr = [("canonical"</>name<.>ext,pr pgf)]
|
||||||
|
|
||||||
single :: String -> (PGF -> CId -> String) -> [(FilePath,String)]
|
single :: String -> (PGF -> CId -> String) -> [(FilePath,String)]
|
||||||
single ext pr = [(showCId cnc <.> ext, pr pgf cnc) | cnc <- languages pgf]
|
single ext pr = [(showCId cnc <.> ext, pr pgf cnc) | cnc <- languages pgf]
|
||||||
|
|
||||||
|
|
||||||
-- | Get the name of the concrete syntax to generate output from.
|
-- | Get the name of the concrete syntax to generate output from.
|
||||||
-- FIXME: there should be an option to change this.
|
-- FIXME: there should be an option to change this.
|
||||||
outputConcr :: PGF -> CId
|
outputConcr :: PGF -> CId
|
||||||
|
|||||||
388
src/compiler/GF/Compile/GrammarToCanonical.hs
Normal file
388
src/compiler/GF/Compile/GrammarToCanonical.hs
Normal file
@@ -0,0 +1,388 @@
|
|||||||
|
-- | Translate grammars to Canonical form
|
||||||
|
-- (a common intermediate representation to simplify export to other formats)
|
||||||
|
module GF.Compile.GrammarToCanonical(
|
||||||
|
grammar2canonical,abstract2canonical,concretes2canonical,
|
||||||
|
projection,selection
|
||||||
|
) where
|
||||||
|
import Data.List(nub,partition)
|
||||||
|
import qualified Data.Map as M
|
||||||
|
import qualified Data.Set as S
|
||||||
|
import GF.Data.ErrM
|
||||||
|
import GF.Text.Pretty
|
||||||
|
import GF.Grammar.Grammar
|
||||||
|
import GF.Grammar.Lookup(lookupOrigInfo,allOrigInfos,allParamValues)
|
||||||
|
import GF.Grammar.Macros(typeForm,collectOp,collectPattOp,mkAbs,mkApp,term2patt)
|
||||||
|
import GF.Grammar.Lockfield(isLockLabel)
|
||||||
|
import GF.Grammar.Predef(cPredef,cInts)
|
||||||
|
import GF.Compile.Compute.Predef(predef)
|
||||||
|
import GF.Compile.Compute.Value(Predefined(..))
|
||||||
|
import GF.Infra.Ident(ModuleName(..),Ident,prefixIdent,showIdent,isWildIdent)
|
||||||
|
import GF.Infra.Option(optionsPGF)
|
||||||
|
import PGF.Internal(Literal(..))
|
||||||
|
import GF.Compile.Compute.ConcreteNew(normalForm,resourceValues)
|
||||||
|
import GF.Grammar.Canonical as C
|
||||||
|
import Debug.Trace
|
||||||
|
|
||||||
|
-- | Generate Canonical code for the named abstract syntax and all associated
|
||||||
|
-- concrete syntaxes
|
||||||
|
grammar2canonical opts absname gr =
|
||||||
|
Grammar (abstract2canonical absname gr)
|
||||||
|
(map snd (concretes2canonical opts absname gr))
|
||||||
|
|
||||||
|
-- | Generate Canonical code for the named abstract syntax
|
||||||
|
abstract2canonical absname gr =
|
||||||
|
Abstract (modId absname) (convFlags gr absname) cats funs
|
||||||
|
where
|
||||||
|
cats = [CatDef (gId c) (convCtx ctx) | ((_,c),AbsCat ctx) <- adefs]
|
||||||
|
|
||||||
|
funs = [FunDef (gId f) (convType ty) |
|
||||||
|
((_,f),AbsFun (Just (L _ ty)) ma mdef _) <- adefs]
|
||||||
|
|
||||||
|
adefs = allOrigInfos gr absname
|
||||||
|
|
||||||
|
convCtx = maybe [] (map convHypo . unLoc)
|
||||||
|
convHypo (bt,name,t) =
|
||||||
|
case typeForm t of
|
||||||
|
([],(_,cat),[]) -> gId cat -- !!
|
||||||
|
|
||||||
|
convType t =
|
||||||
|
case typeForm t of
|
||||||
|
(hyps,(_,cat),args) -> Type bs (TypeApp (gId cat) as)
|
||||||
|
where
|
||||||
|
bs = map convHypo' hyps
|
||||||
|
as = map convType args
|
||||||
|
|
||||||
|
convHypo' (bt,name,t) = TypeBinding (gId name) (convType t)
|
||||||
|
|
||||||
|
|
||||||
|
-- | Generate Canonical code for the all concrete syntaxes associated with
|
||||||
|
-- the named abstract syntax in given the grammar.
|
||||||
|
concretes2canonical opts absname gr =
|
||||||
|
[(cncname,concrete2canonical gr cenv absname cnc cncmod)
|
||||||
|
| let cenv = resourceValues opts gr,
|
||||||
|
cnc<-allConcretes gr absname,
|
||||||
|
let cncname = "canonical/"++render cnc ++ ".gf" :: FilePath
|
||||||
|
Ok cncmod = lookupModule gr cnc
|
||||||
|
]
|
||||||
|
|
||||||
|
-- | Generate Canonical GF for the given concrete module.
|
||||||
|
concrete2canonical gr cenv absname cnc modinfo =
|
||||||
|
Concrete (modId cnc) (modId absname) (convFlags gr cnc)
|
||||||
|
(neededParamTypes S.empty (params defs))
|
||||||
|
[lincat|(_,Left lincat)<-defs]
|
||||||
|
[lin|(_,Right lin)<-defs]
|
||||||
|
where
|
||||||
|
defs = concatMap (toCanonical gr absname cenv) .
|
||||||
|
M.toList $
|
||||||
|
jments modinfo
|
||||||
|
|
||||||
|
params = S.toList . S.unions . map fst
|
||||||
|
|
||||||
|
neededParamTypes have [] = []
|
||||||
|
neededParamTypes have (q:qs) =
|
||||||
|
if q `S.member` have
|
||||||
|
then neededParamTypes have qs
|
||||||
|
else let ((got,need),def) = paramType gr q
|
||||||
|
in def++neededParamTypes (S.union got have) (S.toList need++qs)
|
||||||
|
|
||||||
|
toCanonical gr absname cenv (name,jment) =
|
||||||
|
case jment of
|
||||||
|
CncCat (Just (L loc typ)) _ _ pprn _ ->
|
||||||
|
[(pts,Left (LincatDef (gId name) (convType ntyp)))]
|
||||||
|
where
|
||||||
|
pts = paramTypes gr ntyp
|
||||||
|
ntyp = nf loc typ
|
||||||
|
CncFun (Just r@(cat,ctx,lincat)) (Just (L loc def)) pprn _ ->
|
||||||
|
[(tts,Right (LinDef (gId name) (map gId args) (convert gr e')))]
|
||||||
|
where
|
||||||
|
tts = tableTypes gr [e']
|
||||||
|
|
||||||
|
e' = unAbs (length params) $
|
||||||
|
nf loc (mkAbs params (mkApp def (map Vr args)))
|
||||||
|
params = [(b,x)|(b,x,_)<-ctx]
|
||||||
|
args = map snd params
|
||||||
|
|
||||||
|
AnyInd _ m -> case lookupOrigInfo gr (m,name) of
|
||||||
|
Ok (m,jment) -> toCanonical gr absname cenv (name,jment)
|
||||||
|
_ -> []
|
||||||
|
_ -> []
|
||||||
|
where
|
||||||
|
nf loc = normalForm cenv (L loc name)
|
||||||
|
-- aId n = prefixIdent "A." (gId n)
|
||||||
|
|
||||||
|
unAbs 0 t = t
|
||||||
|
unAbs n (Abs _ _ t) = unAbs (n-1) t
|
||||||
|
unAbs _ t = t
|
||||||
|
|
||||||
|
tableTypes gr ts = S.unions (map tabtys ts)
|
||||||
|
where
|
||||||
|
tabtys t =
|
||||||
|
case t of
|
||||||
|
V t cc -> S.union (paramTypes gr t) (tableTypes gr cc)
|
||||||
|
T (TTyped t) cs -> S.union (paramTypes gr t) (tableTypes gr (map snd cs))
|
||||||
|
_ -> collectOp tabtys t
|
||||||
|
|
||||||
|
paramTypes gr t =
|
||||||
|
case t of
|
||||||
|
RecType fs -> S.unions (map (paramTypes gr.snd) fs)
|
||||||
|
Table t1 t2 -> S.union (paramTypes gr t1) (paramTypes gr t2)
|
||||||
|
App tf ta -> S.union (paramTypes gr tf) (paramTypes gr ta)
|
||||||
|
Sort _ -> S.empty
|
||||||
|
EInt _ -> S.empty
|
||||||
|
Q q -> lookup q
|
||||||
|
QC q -> lookup q
|
||||||
|
FV ts -> S.unions (map (paramTypes gr) ts)
|
||||||
|
_ -> ignore
|
||||||
|
where
|
||||||
|
lookup q = case lookupOrigInfo gr q of
|
||||||
|
Ok (_,ResOper _ (Just (L _ t))) ->
|
||||||
|
S.insert q (paramTypes gr t)
|
||||||
|
Ok (_,ResParam {}) -> S.singleton q
|
||||||
|
_ -> ignore
|
||||||
|
|
||||||
|
ignore = trace ("Ignore: "++show t) S.empty
|
||||||
|
|
||||||
|
|
||||||
|
convert gr = convert' gr []
|
||||||
|
|
||||||
|
convert' gr vs = ppT
|
||||||
|
where
|
||||||
|
ppT0 = convert' gr vs
|
||||||
|
ppTv vs' = convert' gr vs'
|
||||||
|
|
||||||
|
ppT t =
|
||||||
|
case t of
|
||||||
|
-- Abs b x t -> ...
|
||||||
|
-- V ty ts -> VTableValue (convType ty) (map ppT ts)
|
||||||
|
V ty ts -> TableValue (convType ty) [TableRow (ppP p) (ppT t)|(p,t)<-zip ps ts]
|
||||||
|
where
|
||||||
|
Ok pts = allParamValues gr ty
|
||||||
|
Ok ps = mapM term2patt pts
|
||||||
|
T (TTyped ty) cs -> TableValue (convType ty) (map ppCase cs)
|
||||||
|
S t p -> selection (ppT t) (ppT p)
|
||||||
|
C t1 t2 -> concatValue (ppT t1) (ppT t2)
|
||||||
|
App f a -> ap (ppT f) (ppT a)
|
||||||
|
R r -> RecordValue (fields r)
|
||||||
|
P t l -> projection (ppT t) (lblId l)
|
||||||
|
Vr x -> VarValue (gId x)
|
||||||
|
Cn x -> VarValue (gId x) -- hmm
|
||||||
|
Con c -> ParamConstant (Param (gId c) [])
|
||||||
|
Sort k -> VarValue (gId k)
|
||||||
|
EInt n -> LiteralValue (IntConstant n)
|
||||||
|
Q (m,n) -> if m==cPredef then ppPredef n else VarValue ((gQId m n))
|
||||||
|
QC (m,n) -> ParamConstant (Param ((gQId m n)) [])
|
||||||
|
K s -> LiteralValue (StrConstant s)
|
||||||
|
Empty -> LiteralValue (StrConstant "")
|
||||||
|
FV ts -> VariantValue (map ppT ts)
|
||||||
|
Alts t' vs -> alts vs (ppT t')
|
||||||
|
_ -> error $ "convert' "++show t
|
||||||
|
|
||||||
|
ppCase (p,t) = TableRow (ppP p) (ppTv (patVars p++vs) t)
|
||||||
|
|
||||||
|
ppPredef n =
|
||||||
|
case predef n of
|
||||||
|
Ok BIND -> p "BIND"
|
||||||
|
Ok SOFT_BIND -> p "SOFT_BIND"
|
||||||
|
Ok SOFT_SPACE -> p "SOFT_SPACE"
|
||||||
|
Ok CAPIT -> p "CAPIT"
|
||||||
|
Ok ALL_CAPIT -> p "ALL_CAPIT"
|
||||||
|
_ -> VarValue (gQId cPredef n) -- hmm
|
||||||
|
where
|
||||||
|
p = PredefValue . PredefId
|
||||||
|
|
||||||
|
ppP p =
|
||||||
|
case p of
|
||||||
|
PC c ps -> ParamPattern (Param (gId c) (map ppP ps))
|
||||||
|
PP (m,c) ps -> ParamPattern (Param ((gQId m c)) (map ppP ps))
|
||||||
|
PR r -> RecordPattern (fields r) {-
|
||||||
|
PW -> WildPattern
|
||||||
|
PV x -> VarP x
|
||||||
|
PString s -> Lit (show s) -- !!
|
||||||
|
PInt i -> Lit (show i)
|
||||||
|
PFloat x -> Lit (show x)
|
||||||
|
PT _ p -> ppP p
|
||||||
|
PAs x p -> AsP x (ppP p) -}
|
||||||
|
where
|
||||||
|
fields = map field . filter (not.isLockLabel.fst)
|
||||||
|
field (l,p) = RecordRow (lblId l) (ppP p)
|
||||||
|
|
||||||
|
-- patToParam p = case ppP p of ParamPattern pv -> pv
|
||||||
|
|
||||||
|
-- token s = single (c "TK" `Ap` lit s)
|
||||||
|
|
||||||
|
alts vs = PreValue (map alt vs)
|
||||||
|
where
|
||||||
|
alt (t,p) = (pre p,ppT0 t)
|
||||||
|
|
||||||
|
pre (K s) = [s]
|
||||||
|
pre (Strs ts) = concatMap pre ts
|
||||||
|
pre (EPatt p) = pat p
|
||||||
|
pre t = error $ "pre "++show t
|
||||||
|
|
||||||
|
pat (PString s) = [s]
|
||||||
|
pat (PAlt p1 p2) = pat p1++pat p2
|
||||||
|
pat (PSeq p1 p2) = [s1++s2 | s1<-pat p1, s2<-pat p2]
|
||||||
|
pat p = error $ "pat "++show p
|
||||||
|
|
||||||
|
fields = map field . filter (not.isLockLabel.fst)
|
||||||
|
field (l,(_,t)) = RecordRow (lblId l) (ppT t)
|
||||||
|
--c = Const
|
||||||
|
--c = VarValue . VarValueId
|
||||||
|
--lit s = c (show s) -- hmm
|
||||||
|
|
||||||
|
ap f a = case f of
|
||||||
|
ParamConstant (Param p ps) ->
|
||||||
|
ParamConstant (Param p (ps++[a]))
|
||||||
|
_ -> error $ "convert' ap: "++render (ppA f <+> ppA a)
|
||||||
|
|
||||||
|
concatValue v1 v2 =
|
||||||
|
case (v1,v2) of
|
||||||
|
(LiteralValue (StrConstant ""),_) -> v2
|
||||||
|
(_,LiteralValue (StrConstant "")) -> v1
|
||||||
|
_ -> ConcatValue v1 v2
|
||||||
|
|
||||||
|
-- | Smart constructor for projections
|
||||||
|
projection r l = maybe (Projection r l) id (proj r l)
|
||||||
|
|
||||||
|
proj r l =
|
||||||
|
case r of
|
||||||
|
RecordValue r -> case [v|RecordRow l' v<-r,l'==l] of
|
||||||
|
[v] -> Just v
|
||||||
|
_ -> Nothing
|
||||||
|
_ -> Nothing
|
||||||
|
|
||||||
|
-- | Smart constructor for selections
|
||||||
|
selection t v =
|
||||||
|
-- Note: impossible cases can become possible after grammar transformation
|
||||||
|
case t of
|
||||||
|
TableValue tt r ->
|
||||||
|
case nub [rv|TableRow _ rv<-keep] of
|
||||||
|
[rv] -> rv
|
||||||
|
_ -> Selection (TableValue tt r') v
|
||||||
|
where
|
||||||
|
-- Don't introduce wildcard patterns, true to the canonical format,
|
||||||
|
-- annotate (or eliminate) rhs in impossible rows
|
||||||
|
r' = map trunc r
|
||||||
|
trunc r@(TableRow p e) = if mightMatchRow v r
|
||||||
|
then r
|
||||||
|
else TableRow p (impossible e)
|
||||||
|
{-
|
||||||
|
-- Creates smaller tables, but introduces wildcard patterns
|
||||||
|
r' = if null discard
|
||||||
|
then r
|
||||||
|
else keep++[TableRow WildPattern impossible]
|
||||||
|
-}
|
||||||
|
(keep,discard) = partition (mightMatchRow v) r
|
||||||
|
_ -> Selection t v
|
||||||
|
|
||||||
|
impossible = CommentedValue "impossible"
|
||||||
|
|
||||||
|
mightMatchRow v (TableRow p _) =
|
||||||
|
case p of
|
||||||
|
WildPattern -> True
|
||||||
|
_ -> mightMatch v p
|
||||||
|
|
||||||
|
mightMatch v p =
|
||||||
|
case v of
|
||||||
|
ConcatValue _ _ -> False
|
||||||
|
ParamConstant (Param c1 pvs) ->
|
||||||
|
case p of
|
||||||
|
ParamPattern (Param c2 pps) -> c1==c2 && length pvs==length pps &&
|
||||||
|
and [mightMatch v p|(v,p)<-zip pvs pps]
|
||||||
|
_ -> False
|
||||||
|
RecordValue rv ->
|
||||||
|
case p of
|
||||||
|
RecordPattern rp ->
|
||||||
|
and [maybe False (flip mightMatch p) (proj v l) | RecordRow l p<-rp]
|
||||||
|
_ -> False
|
||||||
|
_ -> True
|
||||||
|
|
||||||
|
patVars p =
|
||||||
|
case p of
|
||||||
|
PV x -> [x]
|
||||||
|
PAs x p -> x:patVars p
|
||||||
|
_ -> collectPattOp patVars p
|
||||||
|
|
||||||
|
convType = ppT
|
||||||
|
where
|
||||||
|
ppT t =
|
||||||
|
case t of
|
||||||
|
Table ti tv -> TableType (ppT ti) (ppT tv)
|
||||||
|
RecType rt -> RecordType (convFields rt)
|
||||||
|
-- App tf ta -> TAp (ppT tf) (ppT ta)
|
||||||
|
-- FV [] -> tcon0 (identS "({-empty variant-})")
|
||||||
|
Sort k -> convSort k
|
||||||
|
-- EInt n -> tcon0 (identS ("({-"++show n++"-})")) -- type level numeric literal
|
||||||
|
FV (t:ts) -> ppT t -- !!
|
||||||
|
QC (m,n) -> ParamType (ParamTypeId ((gQId m n)))
|
||||||
|
Q (m,n) -> ParamType (ParamTypeId ((gQId m n)))
|
||||||
|
_ -> error $ "Missing case in convType for: "++show t
|
||||||
|
|
||||||
|
convFields = map convField . filter (not.isLockLabel.fst)
|
||||||
|
convField (l,r) = RecordRow (lblId l) (ppT r)
|
||||||
|
|
||||||
|
convSort k = case showIdent k of
|
||||||
|
"Float" -> FloatType
|
||||||
|
"Int" -> IntType
|
||||||
|
"Str" -> StrType
|
||||||
|
_ -> error ("convSort "++show k)
|
||||||
|
|
||||||
|
toParamType t = case convType t of
|
||||||
|
ParamType pt -> pt
|
||||||
|
_ -> error ("toParamType "++show t)
|
||||||
|
|
||||||
|
toParamId t = case toParamType t of
|
||||||
|
ParamTypeId p -> p
|
||||||
|
|
||||||
|
paramType gr q@(_,n) =
|
||||||
|
case lookupOrigInfo gr q of
|
||||||
|
Ok (m,ResParam (Just (L _ ps)) _)
|
||||||
|
{- - | m/=cPredef && m/=moduleNameS "Prelude"-} ->
|
||||||
|
((S.singleton (m,n),argTypes ps),
|
||||||
|
[ParamDef name (map (param m) ps)]
|
||||||
|
)
|
||||||
|
where name = (gQId m n)
|
||||||
|
Ok (m,ResOper _ (Just (L _ t)))
|
||||||
|
| m==cPredef && n==cInts ->
|
||||||
|
((S.empty,S.empty),[]) {-
|
||||||
|
((S.singleton (m,n),S.empty),
|
||||||
|
[Type (ConAp ((gQId m n)) [identS "n"]) (TId (identS "Int"))])-}
|
||||||
|
| otherwise ->
|
||||||
|
((S.singleton (m,n),paramTypes gr t),
|
||||||
|
[ParamAliasDef ((gQId m n)) (convType t)])
|
||||||
|
_ -> ((S.empty,S.empty),[])
|
||||||
|
where
|
||||||
|
param m (n,ctx) = Param ((gQId m n)) [toParamId t|(_,_,t)<-ctx]
|
||||||
|
argTypes = S.unions . map argTypes1
|
||||||
|
argTypes1 (n,ctx) = S.unions [paramTypes gr t|(_,_,t)<-ctx]
|
||||||
|
|
||||||
|
lblId = LabelId . render -- hmm
|
||||||
|
modId (MN m) = ModId (showIdent m)
|
||||||
|
|
||||||
|
class FromIdent i where gId :: Ident -> i
|
||||||
|
|
||||||
|
instance FromIdent VarId where
|
||||||
|
gId i = if isWildIdent i then Anonymous else VarId (showIdent i)
|
||||||
|
|
||||||
|
instance FromIdent C.FunId where gId = C.FunId . showIdent
|
||||||
|
instance FromIdent CatId where gId = CatId . showIdent
|
||||||
|
instance FromIdent ParamId where gId = ParamId . unqual
|
||||||
|
instance FromIdent VarValueId where gId = VarValueId . unqual
|
||||||
|
|
||||||
|
class FromIdent i => QualIdent i where gQId :: ModuleName -> Ident -> i
|
||||||
|
|
||||||
|
instance QualIdent ParamId where gQId m n = ParamId (qual m n)
|
||||||
|
instance QualIdent VarValueId where gQId m n = VarValueId (qual m n)
|
||||||
|
|
||||||
|
qual m n = Qual (modId m) (showIdent n)
|
||||||
|
unqual n = Unqual (showIdent n)
|
||||||
|
|
||||||
|
convFlags gr mn =
|
||||||
|
Flags [(n,convLit v) |
|
||||||
|
(n,v)<-err (const []) (optionsPGF.mflags) (lookupModule gr mn)]
|
||||||
|
where
|
||||||
|
convLit l =
|
||||||
|
case l of
|
||||||
|
LStr s -> Str s
|
||||||
|
LInt i -> C.Int i
|
||||||
|
LFlt d -> Flt d
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
{-# LANGUAGE BangPatterns, FlexibleContexts #-}
|
{-# LANGUAGE BangPatterns, FlexibleContexts, MagicHash #-}
|
||||||
module GF.Compile.GrammarToPGF (mkCanon2pgf) where
|
module GF.Compile.GrammarToPGF (mkCanon2pgf) where
|
||||||
|
|
||||||
--import GF.Compile.Export
|
--import GF.Compile.Export
|
||||||
@@ -30,6 +30,10 @@ import qualified Data.Map as Map
|
|||||||
import qualified Data.IntMap as IntMap
|
import qualified Data.IntMap as IntMap
|
||||||
import Data.Array.IArray
|
import Data.Array.IArray
|
||||||
|
|
||||||
|
import Data.Char
|
||||||
|
import GHC.Prim
|
||||||
|
import GHC.Base(getTag)
|
||||||
|
|
||||||
mkCanon2pgf :: Options -> SourceGrammar -> ModuleName -> IOE D.PGF
|
mkCanon2pgf :: Options -> SourceGrammar -> ModuleName -> IOE D.PGF
|
||||||
mkCanon2pgf opts gr am = do
|
mkCanon2pgf opts gr am = do
|
||||||
(an,abs) <- mkAbstr am
|
(an,abs) <- mkAbstr am
|
||||||
@@ -59,7 +63,9 @@ mkCanon2pgf opts gr am = do
|
|||||||
[(0,i2i f) | ((m,f),AbsFun (Just (L _ ty)) _ _ (Just True)) <- adefs, snd (GM.valCat ty) == cat]
|
[(0,i2i f) | ((m,f),AbsFun (Just (L _ ty)) _ _ (Just True)) <- adefs, snd (GM.valCat ty) == cat]
|
||||||
|
|
||||||
mkConcr cm = do
|
mkConcr cm = do
|
||||||
let cflags = err (const noOptions) mflags (lookupModule gr cm)
|
let cflags = err (const noOptions) mflags (lookupModule gr cm)
|
||||||
|
ciCmp | flag optCaseSensitive cflags = compare
|
||||||
|
| otherwise = compareCaseInsensitve
|
||||||
|
|
||||||
(ex_seqs,cdefs) <- addMissingPMCFGs
|
(ex_seqs,cdefs) <- addMissingPMCFGs
|
||||||
Map.empty
|
Map.empty
|
||||||
@@ -68,14 +74,14 @@ mkCanon2pgf opts gr am = do
|
|||||||
|
|
||||||
let flags = Map.fromList [(mkCId f,x) | (f,x) <- optionsPGF cflags]
|
let flags = Map.fromList [(mkCId f,x) | (f,x) <- optionsPGF cflags]
|
||||||
|
|
||||||
seqs = (mkSetArray . Set.fromList . concat) $
|
seqs = (mkArray . sortNubBy ciCmp . concat) $
|
||||||
(Map.keys ex_seqs : [maybe [] elems (mseqs mi) | (m,mi) <- allExtends gr cm])
|
(Map.keys ex_seqs : [maybe [] elems (mseqs mi) | (m,mi) <- allExtends gr cm])
|
||||||
|
|
||||||
ex_seqs_arr = mkMapArray ex_seqs :: Array SeqId Sequence
|
ex_seqs_arr = mkMapArray ex_seqs :: Array SeqId Sequence
|
||||||
|
|
||||||
!(!fid_cnt1,!cnccats) = genCncCats gr am cm cdefs
|
!(!fid_cnt1,!cnccats) = genCncCats gr am cm cdefs
|
||||||
!(!fid_cnt2,!productions,!lindefs,!linrefs,!cncfuns)
|
!(!fid_cnt2,!productions,!lindefs,!linrefs,!cncfuns)
|
||||||
= genCncFuns gr am cm ex_seqs_arr seqs cdefs fid_cnt1 cnccats
|
= genCncFuns gr am cm ex_seqs_arr ciCmp seqs cdefs fid_cnt1 cnccats
|
||||||
|
|
||||||
printnames = genPrintNames cdefs
|
printnames = genPrintNames cdefs
|
||||||
return (mi2i cm, D.Concr flags
|
return (mi2i cm, D.Concr flags
|
||||||
@@ -186,6 +192,7 @@ genCncFuns :: Grammar
|
|||||||
-> ModuleName
|
-> ModuleName
|
||||||
-> ModuleName
|
-> ModuleName
|
||||||
-> Array SeqId Sequence
|
-> Array SeqId Sequence
|
||||||
|
-> (Sequence -> Sequence -> Ordering)
|
||||||
-> Array SeqId Sequence
|
-> Array SeqId Sequence
|
||||||
-> [(QIdent, Info)]
|
-> [(QIdent, Info)]
|
||||||
-> FId
|
-> FId
|
||||||
@@ -195,7 +202,7 @@ genCncFuns :: Grammar
|
|||||||
IntMap.IntMap [FunId],
|
IntMap.IntMap [FunId],
|
||||||
IntMap.IntMap [FunId],
|
IntMap.IntMap [FunId],
|
||||||
Array FunId D.CncFun)
|
Array FunId D.CncFun)
|
||||||
genCncFuns gr am cm ex_seqs seqs cdefs fid_cnt cnccats =
|
genCncFuns gr am cm ex_seqs ciCmp seqs cdefs fid_cnt cnccats =
|
||||||
let (fid_cnt1,funs_cnt1,funs1,lindefs,linrefs) = mkCncCats cdefs fid_cnt 0 [] IntMap.empty IntMap.empty
|
let (fid_cnt1,funs_cnt1,funs1,lindefs,linrefs) = mkCncCats cdefs fid_cnt 0 [] IntMap.empty IntMap.empty
|
||||||
(fid_cnt2,funs_cnt2,funs2,prods) = mkCncFuns cdefs fid_cnt1 funs_cnt1 funs1 lindefs Map.empty IntMap.empty
|
(fid_cnt2,funs_cnt2,funs2,prods) = mkCncFuns cdefs fid_cnt1 funs_cnt1 funs1 lindefs Map.empty IntMap.empty
|
||||||
in (fid_cnt2,prods,lindefs,linrefs,array (0,funs_cnt2-1) funs2)
|
in (fid_cnt2,prods,lindefs,linrefs,array (0,funs_cnt2-1) funs2)
|
||||||
@@ -284,7 +291,7 @@ genCncFuns gr am cm ex_seqs seqs cdefs fid_cnt cnccats =
|
|||||||
newIndex mseqs i = binSearch (mseqs ! i) seqs (bounds seqs)
|
newIndex mseqs i = binSearch (mseqs ! i) seqs (bounds seqs)
|
||||||
|
|
||||||
binSearch v arr (i,j)
|
binSearch v arr (i,j)
|
||||||
| i <= j = case compare v (arr ! k) of
|
| i <= j = case ciCmp v (arr ! k) of
|
||||||
LT -> binSearch v arr (i,k-1)
|
LT -> binSearch v arr (i,k-1)
|
||||||
EQ -> k
|
EQ -> k
|
||||||
GT -> binSearch v arr (k+1,j)
|
GT -> binSearch v arr (k+1,j)
|
||||||
@@ -303,6 +310,121 @@ genPrintNames cdefs =
|
|||||||
flatten (Alts x _) = flatten x
|
flatten (Alts x _) = flatten x
|
||||||
flatten (C x y) = flatten x +++ flatten y
|
flatten (C x y) = flatten x +++ flatten y
|
||||||
|
|
||||||
--mkArray lst = listArray (0,length lst-1) lst
|
mkArray lst = listArray (0,length lst-1) lst
|
||||||
mkMapArray map = array (0,Map.size map-1) [(v,k) | (k,v) <- Map.toList map]
|
mkMapArray map = array (0,Map.size map-1) [(v,k) | (k,v) <- Map.toList map]
|
||||||
mkSetArray set = listArray (0,Set.size set-1) [v | v <- Set.toList set]
|
|
||||||
|
-- The following is a version of Data.List.sortBy which together
|
||||||
|
-- with the sorting also eliminates duplicate values
|
||||||
|
sortNubBy cmp = mergeAll . sequences
|
||||||
|
where
|
||||||
|
sequences (a:b:xs) =
|
||||||
|
case cmp a b of
|
||||||
|
GT -> descending b [a] xs
|
||||||
|
EQ -> sequences (b:xs)
|
||||||
|
LT -> ascending b (a:) xs
|
||||||
|
sequences xs = [xs]
|
||||||
|
|
||||||
|
descending a as [] = [a:as]
|
||||||
|
descending a as (b:bs) =
|
||||||
|
case cmp a b of
|
||||||
|
GT -> descending b (a:as) bs
|
||||||
|
EQ -> descending a as bs
|
||||||
|
LT -> (a:as) : sequences (b:bs)
|
||||||
|
|
||||||
|
ascending a as [] = let !x = as [a]
|
||||||
|
in [x]
|
||||||
|
ascending a as (b:bs) =
|
||||||
|
case cmp a b of
|
||||||
|
GT -> let !x = as [a]
|
||||||
|
in x : sequences (b:bs)
|
||||||
|
EQ -> ascending a as bs
|
||||||
|
LT -> ascending b (\ys -> as (a:ys)) bs
|
||||||
|
|
||||||
|
mergeAll [x] = x
|
||||||
|
mergeAll xs = mergeAll (mergePairs xs)
|
||||||
|
|
||||||
|
mergePairs (a:b:xs) = let !x = merge a b
|
||||||
|
in x : mergePairs xs
|
||||||
|
mergePairs xs = xs
|
||||||
|
|
||||||
|
merge as@(a:as') bs@(b:bs') =
|
||||||
|
case cmp a b of
|
||||||
|
GT -> b:merge as bs'
|
||||||
|
EQ -> a:merge as' bs'
|
||||||
|
LT -> a:merge as' bs
|
||||||
|
merge [] bs = bs
|
||||||
|
merge as [] = as
|
||||||
|
|
||||||
|
-- The following function does case-insensitive comparison of sequences.
|
||||||
|
-- This is used to allow case-insensitive parsing, while
|
||||||
|
-- the linearizer still has access to the original cases.
|
||||||
|
compareCaseInsensitve s1 s2 =
|
||||||
|
compareSeq (elems s1) (elems s2)
|
||||||
|
where
|
||||||
|
compareSeq [] [] = EQ
|
||||||
|
compareSeq [] _ = LT
|
||||||
|
compareSeq _ [] = GT
|
||||||
|
compareSeq (x:xs) (y:ys) =
|
||||||
|
case compareSym x y of
|
||||||
|
EQ -> compareSeq xs ys
|
||||||
|
x -> x
|
||||||
|
|
||||||
|
compareSym s1 s2 =
|
||||||
|
case s1 of
|
||||||
|
D.SymCat d1 r1
|
||||||
|
-> case s2 of
|
||||||
|
D.SymCat d2 r2
|
||||||
|
-> case compare d1 d2 of
|
||||||
|
EQ -> r1 `compare` r2
|
||||||
|
x -> x
|
||||||
|
_ -> LT
|
||||||
|
D.SymLit d1 r1
|
||||||
|
-> case s2 of
|
||||||
|
D.SymCat {} -> GT
|
||||||
|
D.SymLit d2 r2
|
||||||
|
-> case compare d1 d2 of
|
||||||
|
EQ -> r1 `compare` r2
|
||||||
|
x -> x
|
||||||
|
_ -> LT
|
||||||
|
D.SymVar d1 r1
|
||||||
|
-> if tagToEnum# (getTag s2 ># 2#)
|
||||||
|
then LT
|
||||||
|
else case s2 of
|
||||||
|
D.SymVar d2 r2
|
||||||
|
-> case compare d1 d2 of
|
||||||
|
EQ -> r1 `compare` r2
|
||||||
|
x -> x
|
||||||
|
_ -> GT
|
||||||
|
D.SymKS t1
|
||||||
|
-> if tagToEnum# (getTag s2 ># 3#)
|
||||||
|
then LT
|
||||||
|
else case s2 of
|
||||||
|
D.SymKS t2 -> t1 `compareToken` t2
|
||||||
|
_ -> GT
|
||||||
|
D.SymKP a1 b1
|
||||||
|
-> if tagToEnum# (getTag s2 ># 4#)
|
||||||
|
then LT
|
||||||
|
else case s2 of
|
||||||
|
D.SymKP a2 b2
|
||||||
|
-> case compare a1 a2 of
|
||||||
|
EQ -> b1 `compare` b2
|
||||||
|
x -> x
|
||||||
|
_ -> GT
|
||||||
|
_ -> let t1 = getTag s1
|
||||||
|
t2 = getTag s2
|
||||||
|
in if tagToEnum# (t1 <# t2)
|
||||||
|
then LT
|
||||||
|
else if tagToEnum# (t1 ==# t2)
|
||||||
|
then EQ
|
||||||
|
else GT
|
||||||
|
|
||||||
|
compareToken [] [] = EQ
|
||||||
|
compareToken [] _ = LT
|
||||||
|
compareToken _ [] = GT
|
||||||
|
compareToken (x:xs) (y:ys)
|
||||||
|
| x == y = compareToken xs ys
|
||||||
|
| otherwise = case compare (toLower x) (toLower y) of
|
||||||
|
EQ -> case compareToken xs ys of
|
||||||
|
EQ -> compare x y
|
||||||
|
x -> x
|
||||||
|
x -> x
|
||||||
|
|||||||
156
src/compiler/GF/Compile/PGFtoJSON.hs
Normal file
156
src/compiler/GF/Compile/PGFtoJSON.hs
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
module GF.Compile.PGFtoJSON (pgf2json) where
|
||||||
|
|
||||||
|
import PGF (showCId)
|
||||||
|
import qualified PGF.Internal as M
|
||||||
|
import PGF.Internal (
|
||||||
|
Abstr,
|
||||||
|
CId,
|
||||||
|
CncCat(..),
|
||||||
|
CncFun(..),
|
||||||
|
Concr,
|
||||||
|
DotPos,
|
||||||
|
Equation(..),
|
||||||
|
Literal(..),
|
||||||
|
PArg(..),
|
||||||
|
PGF,
|
||||||
|
Production(..),
|
||||||
|
Symbol(..),
|
||||||
|
Type,
|
||||||
|
absname,
|
||||||
|
abstract,
|
||||||
|
cflags,
|
||||||
|
cnccats,
|
||||||
|
cncfuns,
|
||||||
|
concretes,
|
||||||
|
funs,
|
||||||
|
productions,
|
||||||
|
sequences,
|
||||||
|
totalCats
|
||||||
|
)
|
||||||
|
|
||||||
|
import qualified Text.JSON as JSON
|
||||||
|
import Text.JSON (JSValue(..))
|
||||||
|
|
||||||
|
import qualified Data.Array.IArray as Array
|
||||||
|
import Data.Map (Map)
|
||||||
|
import qualified Data.Set as Set
|
||||||
|
import qualified Data.Map as Map
|
||||||
|
import qualified Data.IntMap as IntMap
|
||||||
|
|
||||||
|
pgf2json :: PGF -> String
|
||||||
|
pgf2json pgf =
|
||||||
|
JSON.encode $ JSON.makeObj
|
||||||
|
[ ("abstract", json_abstract)
|
||||||
|
, ("concretes", json_concretes)
|
||||||
|
]
|
||||||
|
where
|
||||||
|
n = showCId $ absname pgf
|
||||||
|
as = abstract pgf
|
||||||
|
cs = Map.assocs (concretes pgf)
|
||||||
|
start = showCId $ M.lookStartCat pgf
|
||||||
|
json_abstract = abstract2json n start as
|
||||||
|
json_concretes = JSON.makeObj $ map concrete2json cs
|
||||||
|
|
||||||
|
abstract2json :: String -> String -> Abstr -> JSValue
|
||||||
|
abstract2json name start ds =
|
||||||
|
JSON.makeObj
|
||||||
|
[ ("name", mkJSStr name)
|
||||||
|
, ("startcat", mkJSStr start)
|
||||||
|
, ("funs", JSON.makeObj $ map absdef2json (Map.assocs (funs ds)))
|
||||||
|
]
|
||||||
|
|
||||||
|
absdef2json :: (CId,(Type,Int,Maybe ([Equation],[[M.Instr]]),Double)) -> (String,JSValue)
|
||||||
|
absdef2json (f,(typ,_,_,_)) = (showCId f,sig)
|
||||||
|
where
|
||||||
|
(args,cat) = M.catSkeleton typ
|
||||||
|
sig = JSON.makeObj
|
||||||
|
[ ("args", JSArray $ map (mkJSStr.showCId) args)
|
||||||
|
, ("cat", mkJSStr $ showCId cat)
|
||||||
|
]
|
||||||
|
|
||||||
|
lit2json :: Literal -> JSValue
|
||||||
|
lit2json (LStr s) = mkJSStr s
|
||||||
|
lit2json (LInt n) = mkJSInt n
|
||||||
|
lit2json (LFlt d) = JSRational True (toRational d)
|
||||||
|
|
||||||
|
concrete2json :: (CId,Concr) -> (String,JSValue)
|
||||||
|
concrete2json (c,cnc) = (showCId c,obj)
|
||||||
|
where
|
||||||
|
obj = JSON.makeObj
|
||||||
|
[ ("flags", JSON.makeObj [ (showCId k, lit2json v) | (k,v) <- Map.toList (cflags cnc) ])
|
||||||
|
, ("productions", JSON.makeObj [ (show cat, JSArray (map frule2json (Set.toList set))) | (cat,set) <- IntMap.toList (productions cnc)])
|
||||||
|
, ("functions", JSArray (map ffun2json (Array.elems (cncfuns cnc))))
|
||||||
|
, ("sequences", JSArray (map seq2json (Array.elems (sequences cnc))))
|
||||||
|
, ("categories", JSON.makeObj $ map cats2json (Map.assocs (cnccats cnc)))
|
||||||
|
, ("totalfids", mkJSInt (totalCats cnc))
|
||||||
|
]
|
||||||
|
|
||||||
|
cats2json :: (CId, CncCat) -> (String,JSValue)
|
||||||
|
cats2json (c,CncCat start end _) = (showCId c, ixs)
|
||||||
|
where
|
||||||
|
ixs = JSON.makeObj
|
||||||
|
[ ("start", mkJSInt start)
|
||||||
|
, ("end", mkJSInt end)
|
||||||
|
]
|
||||||
|
|
||||||
|
frule2json :: Production -> JSValue
|
||||||
|
frule2json (PApply fid args) =
|
||||||
|
JSON.makeObj
|
||||||
|
[ ("type", mkJSStr "Apply")
|
||||||
|
, ("fid", mkJSInt fid)
|
||||||
|
, ("args", JSArray (map farg2json args))
|
||||||
|
]
|
||||||
|
frule2json (PCoerce arg) =
|
||||||
|
JSON.makeObj
|
||||||
|
[ ("type", mkJSStr "Coerce")
|
||||||
|
, ("arg", mkJSInt arg)
|
||||||
|
]
|
||||||
|
|
||||||
|
farg2json :: PArg -> JSValue
|
||||||
|
farg2json (PArg hypos fid) =
|
||||||
|
JSON.makeObj
|
||||||
|
[ ("type", mkJSStr "PArg")
|
||||||
|
, ("hypos", JSArray $ map (mkJSInt . snd) hypos)
|
||||||
|
, ("fid", mkJSInt fid)
|
||||||
|
]
|
||||||
|
|
||||||
|
ffun2json :: CncFun -> JSValue
|
||||||
|
ffun2json (CncFun f lins) =
|
||||||
|
JSON.makeObj
|
||||||
|
[ ("name", mkJSStr $ showCId f)
|
||||||
|
, ("lins", JSArray (map mkJSInt (Array.elems lins)))
|
||||||
|
]
|
||||||
|
|
||||||
|
seq2json :: Array.Array DotPos Symbol -> JSValue
|
||||||
|
seq2json seq = JSArray [sym2json s | s <- Array.elems seq]
|
||||||
|
|
||||||
|
sym2json :: Symbol -> JSValue
|
||||||
|
sym2json (SymCat n l) = new "SymCat" [mkJSInt n, mkJSInt l]
|
||||||
|
sym2json (SymLit n l) = new "SymLit" [mkJSInt n, mkJSInt l]
|
||||||
|
sym2json (SymVar n l) = new "SymVar" [mkJSInt n, mkJSInt l]
|
||||||
|
sym2json (SymKS t) = new "SymKS" [mkJSStr t]
|
||||||
|
sym2json (SymKP ts alts) = new "SymKP" [JSArray (map sym2json ts), JSArray (map alt2json alts)]
|
||||||
|
sym2json SymBIND = new "SymKS" [mkJSStr "&+"]
|
||||||
|
sym2json SymSOFT_BIND = new "SymKS" [mkJSStr "&+"]
|
||||||
|
sym2json SymSOFT_SPACE = new "SymKS" [mkJSStr "&+"]
|
||||||
|
sym2json SymCAPIT = new "SymKS" [mkJSStr "&|"]
|
||||||
|
sym2json SymALL_CAPIT = new "SymKS" [mkJSStr "&|"]
|
||||||
|
sym2json SymNE = new "SymNE" []
|
||||||
|
|
||||||
|
alt2json :: ([Symbol],[String]) -> JSValue
|
||||||
|
alt2json (ps,ts) = new "Alt" [JSArray (map sym2json ps), JSArray (map mkJSStr ts)]
|
||||||
|
|
||||||
|
new :: String -> [JSValue] -> JSValue
|
||||||
|
new f xs =
|
||||||
|
JSON.makeObj
|
||||||
|
[ ("type", mkJSStr f)
|
||||||
|
, ("args", JSArray xs)
|
||||||
|
]
|
||||||
|
|
||||||
|
-- | Make JSON value from string
|
||||||
|
mkJSStr :: String -> JSValue
|
||||||
|
mkJSStr = JSString . JSON.toJSString
|
||||||
|
|
||||||
|
-- | Make JSON value from integer
|
||||||
|
mkJSInt :: Integral a => a -> JSValue
|
||||||
|
mkJSInt = JSRational False . toRational
|
||||||
@@ -360,12 +360,13 @@ getOverload gr g mt ot = case appForm ot of
|
|||||||
nest 2 (showTypes pre)
|
nest 2 (showTypes pre)
|
||||||
return (mkApp fun tts, val)
|
return (mkApp fun tts, val)
|
||||||
([],[]) -> do
|
([],[]) -> do
|
||||||
checkError $ "no overload instance of" <+> ppTerm Unqualified 0 f $$
|
checkError $ "no overload instance of" <+> ppTerm Qualified 0 f $$
|
||||||
"for" $$
|
maybe empty (\x -> "with value type" <+> ppType x) mt $$
|
||||||
|
"for argument list" $$
|
||||||
nest 2 stysError $$
|
nest 2 stysError $$
|
||||||
"among" $$
|
"among alternatives" $$
|
||||||
nest 2 (vcat stypsError) $$
|
nest 2 (vcat stypsError)
|
||||||
maybe empty (\x -> "with value type" <+> ppType x) mt
|
|
||||||
|
|
||||||
(vfs1,vfs2) -> case (noProds vfs1,noProds vfs2) of
|
(vfs1,vfs2) -> case (noProds vfs1,noProds vfs2) of
|
||||||
([(val,fun)],_) -> do
|
([(val,fun)],_) -> do
|
||||||
|
|||||||
232
src/compiler/GF/Compile/pgf.schema.json
Normal file
232
src/compiler/GF/Compile/pgf.schema.json
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
{
|
||||||
|
"$schema": "http://json-schema.org/draft-07/schema#",
|
||||||
|
"$id": "http://grammaticalframework.org/pgf.schema.json",
|
||||||
|
"type": "object",
|
||||||
|
"title": "PGF JSON Schema",
|
||||||
|
"required": [
|
||||||
|
"abstract",
|
||||||
|
"concretes"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"abstract": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"name",
|
||||||
|
"startcat",
|
||||||
|
"funs"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"startcat": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"funs": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"args",
|
||||||
|
"cat"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"args": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"cat": {
|
||||||
|
"type": "string"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"concretes": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"required": [
|
||||||
|
"flags",
|
||||||
|
"productions",
|
||||||
|
"functions",
|
||||||
|
"sequences",
|
||||||
|
"categories",
|
||||||
|
"totalfids"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"flags": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": ["string", "number"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"productions": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"oneOf": [
|
||||||
|
{
|
||||||
|
"$ref": "#/definitions/apply"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"$ref": "#/definitions/coerce"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"functions": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"title": "CncFun",
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"name": {
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
"lins": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sequences": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/sym"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"categories": {
|
||||||
|
"type": "object",
|
||||||
|
"additionalProperties": {
|
||||||
|
"title": "CncCat",
|
||||||
|
"type": "object",
|
||||||
|
"required": [
|
||||||
|
"start",
|
||||||
|
"end"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"start": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"end": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"totalfids": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"definitions": {
|
||||||
|
"apply": {
|
||||||
|
"required": [
|
||||||
|
"type",
|
||||||
|
"fid",
|
||||||
|
"args"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["Apply"]
|
||||||
|
},
|
||||||
|
"fid": {
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
"args": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"$ref": "#/definitions/parg"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"coerce": {
|
||||||
|
"required": [
|
||||||
|
"type",
|
||||||
|
"arg"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["Coerce"]
|
||||||
|
},
|
||||||
|
"arg": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"parg": {
|
||||||
|
"required": [
|
||||||
|
"type",
|
||||||
|
"hypos",
|
||||||
|
"fid"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["PArg"]
|
||||||
|
},
|
||||||
|
"hypos": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fid": {
|
||||||
|
"type": "integer"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"sym": {
|
||||||
|
"title": "Sym",
|
||||||
|
"required": [
|
||||||
|
"type",
|
||||||
|
"args"
|
||||||
|
],
|
||||||
|
"properties": {
|
||||||
|
"type": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": [
|
||||||
|
"SymCat",
|
||||||
|
"SymLit",
|
||||||
|
"SymVar",
|
||||||
|
"SymKS",
|
||||||
|
"SymKP",
|
||||||
|
"SymNE"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"args": {
|
||||||
|
"type": "array",
|
||||||
|
"items": {
|
||||||
|
"anyOf": [
|
||||||
|
{
|
||||||
|
"type": "string"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "integer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"$ref": "#/definitions/sym"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -7,6 +7,7 @@ import GF.Compile as S(batchCompile,link,srcAbsName)
|
|||||||
import GF.CompileInParallel as P(parallelBatchCompile)
|
import GF.CompileInParallel as P(parallelBatchCompile)
|
||||||
import GF.Compile.Export
|
import GF.Compile.Export
|
||||||
import GF.Compile.ConcreteToHaskell(concretes2haskell)
|
import GF.Compile.ConcreteToHaskell(concretes2haskell)
|
||||||
|
import GF.Compile.GrammarToCanonical--(concretes2canonical)
|
||||||
import GF.Compile.CFGtoPGF
|
import GF.Compile.CFGtoPGF
|
||||||
import GF.Compile.GetGrammar
|
import GF.Compile.GetGrammar
|
||||||
import GF.Grammar.BNFC
|
import GF.Grammar.BNFC
|
||||||
@@ -17,12 +18,13 @@ import GF.Infra.UseIO
|
|||||||
import GF.Infra.Option
|
import GF.Infra.Option
|
||||||
import GF.Data.ErrM
|
import GF.Data.ErrM
|
||||||
import GF.System.Directory
|
import GF.System.Directory
|
||||||
import GF.Text.Pretty(render)
|
import GF.Text.Pretty(render,render80)
|
||||||
|
|
||||||
import Data.Maybe
|
import Data.Maybe
|
||||||
import qualified Data.Map as Map
|
import qualified Data.Map as Map
|
||||||
import qualified Data.Set as Set
|
import qualified Data.Set as Set
|
||||||
import qualified Data.ByteString.Lazy as BSL
|
import qualified Data.ByteString.Lazy as BSL
|
||||||
|
import GF.Grammar.CanonicalJSON (encodeJSON)
|
||||||
import System.FilePath
|
import System.FilePath
|
||||||
import Control.Monad(when,unless,forM_)
|
import Control.Monad(when,unless,forM_)
|
||||||
|
|
||||||
@@ -47,7 +49,7 @@ mainGFC opts fs = do
|
|||||||
compileSourceFiles :: Options -> [FilePath] -> IOE ()
|
compileSourceFiles :: Options -> [FilePath] -> IOE ()
|
||||||
compileSourceFiles opts fs =
|
compileSourceFiles opts fs =
|
||||||
do output <- batchCompile opts fs
|
do output <- batchCompile opts fs
|
||||||
cncs2haskell output
|
exportCanonical output
|
||||||
unless (flag optStopAfterPhase opts == Compile) $
|
unless (flag optStopAfterPhase opts == Compile) $
|
||||||
linkGrammars opts output
|
linkGrammars opts output
|
||||||
where
|
where
|
||||||
@@ -55,15 +57,35 @@ compileSourceFiles opts fs =
|
|||||||
batchCompile' opts fs = do (t,cnc_gr) <- S.batchCompile opts fs
|
batchCompile' opts fs = do (t,cnc_gr) <- S.batchCompile opts fs
|
||||||
return (t,[cnc_gr])
|
return (t,[cnc_gr])
|
||||||
|
|
||||||
cncs2haskell output =
|
exportCanonical (_time, canonical) =
|
||||||
when (FmtHaskell `elem` flag optOutputFormats opts &&
|
do when (FmtHaskell `elem` ofmts && haskellOption opts HaskellConcrete) $
|
||||||
haskellOption opts HaskellConcrete) $
|
mapM_ cnc2haskell canonical
|
||||||
mapM_ cnc2haskell (snd output)
|
when (FmtCanonicalGF `elem` ofmts) $
|
||||||
|
do createDirectoryIfMissing False "canonical"
|
||||||
|
mapM_ abs2canonical canonical
|
||||||
|
mapM_ cnc2canonical canonical
|
||||||
|
when (FmtCanonicalJson `elem` ofmts) $ mapM_ grammar2json canonical
|
||||||
|
where
|
||||||
|
ofmts = flag optOutputFormats opts
|
||||||
|
|
||||||
cnc2haskell (cnc,gr) =
|
cnc2haskell (cnc,gr) =
|
||||||
mapM_ writeHs $ concretes2haskell opts (srcAbsName gr cnc) gr
|
do mapM_ writeExport $ concretes2haskell opts (srcAbsName gr cnc) gr
|
||||||
|
|
||||||
writeHs (path,s) = writing opts path $ writeUTF8File path s
|
abs2canonical (cnc,gr) =
|
||||||
|
writeExport ("canonical/"++render absname++".gf",render80 canAbs)
|
||||||
|
where
|
||||||
|
absname = srcAbsName gr cnc
|
||||||
|
canAbs = abstract2canonical absname gr
|
||||||
|
|
||||||
|
cnc2canonical (cnc,gr) =
|
||||||
|
mapM_ (writeExport.fmap render80) $
|
||||||
|
concretes2canonical opts (srcAbsName gr cnc) gr
|
||||||
|
|
||||||
|
grammar2json (cnc,gr) = encodeJSON (render absname ++ ".json") gr_canon
|
||||||
|
where absname = srcAbsName gr cnc
|
||||||
|
gr_canon = grammar2canonical opts absname gr
|
||||||
|
|
||||||
|
writeExport (path,s) = writing opts path $ writeUTF8File path s
|
||||||
|
|
||||||
|
|
||||||
-- | Create a @.pgf@ file (and possibly files in other formats, if specified
|
-- | Create a @.pgf@ file (and possibly files in other formats, if specified
|
||||||
@@ -80,7 +102,9 @@ linkGrammars opts (t_src,~cnc_grs@(~(cnc,gr):_)) =
|
|||||||
if t_pgf >= Just t_src
|
if t_pgf >= Just t_src
|
||||||
then putIfVerb opts $ pgfFile ++ " is up-to-date."
|
then putIfVerb opts $ pgfFile ++ " is up-to-date."
|
||||||
else do pgfs <- mapM (link opts) cnc_grs
|
else do pgfs <- mapM (link opts) cnc_grs
|
||||||
let pgf = foldl1 unionPGF pgfs
|
let pgf0 = foldl1 unionPGF pgfs
|
||||||
|
probs <- maybe (return . defaultProbabilities) readProbabilitiesFromFile (flag optProbsFile opts) pgf0
|
||||||
|
let pgf = setProbabilities probs pgf0
|
||||||
writePGF opts pgf
|
writePGF opts pgf
|
||||||
writeOutputs opts pgf
|
writeOutputs opts pgf
|
||||||
|
|
||||||
@@ -115,7 +139,9 @@ unionPGFFiles opts fs =
|
|||||||
doIt =
|
doIt =
|
||||||
do pgfs <- mapM readPGFVerbose fs
|
do pgfs <- mapM readPGFVerbose fs
|
||||||
let pgf0 = foldl1 unionPGF pgfs
|
let pgf0 = foldl1 unionPGF pgfs
|
||||||
pgf = if flag optOptimizePGF opts then optimizePGF pgf0 else pgf0
|
pgf1 = if flag optOptimizePGF opts then optimizePGF pgf0 else pgf0
|
||||||
|
probs <- liftIO (maybe (return . defaultProbabilities) readProbabilitiesFromFile (flag optProbsFile opts) pgf1)
|
||||||
|
let pgf = setProbabilities probs pgf1
|
||||||
pgfFile = outputPath opts (grammarName opts pgf <.> "pgf")
|
pgfFile = outputPath opts (grammarName opts pgf <.> "pgf")
|
||||||
if pgfFile `elem` fs
|
if pgfFile `elem` fs
|
||||||
then putStrLnE $ "Refusing to overwrite " ++ pgfFile
|
then putStrLnE $ "Refusing to overwrite " ++ pgfFile
|
||||||
|
|||||||
313
src/compiler/GF/Grammar/Canonical.hs
Normal file
313
src/compiler/GF/Grammar/Canonical.hs
Normal file
@@ -0,0 +1,313 @@
|
|||||||
|
-- |
|
||||||
|
-- Module : GF.Grammar.Canonical
|
||||||
|
-- Stability : provisional
|
||||||
|
--
|
||||||
|
-- Abstract syntax for canonical GF grammars, i.e. what's left after
|
||||||
|
-- high-level constructions such as functors and opers have been eliminated
|
||||||
|
-- by partial evaluation. This is intended as a common intermediate
|
||||||
|
-- representation to simplify export to other formats.
|
||||||
|
|
||||||
|
{-# LANGUAGE DeriveTraversable #-}
|
||||||
|
module GF.Grammar.Canonical where
|
||||||
|
import Prelude hiding ((<>))
|
||||||
|
import GF.Text.Pretty
|
||||||
|
|
||||||
|
-- | A Complete grammar
|
||||||
|
data Grammar = Grammar Abstract [Concrete] deriving Show
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Abstract Syntax
|
||||||
|
|
||||||
|
-- | Abstract Syntax
|
||||||
|
data Abstract = Abstract ModId Flags [CatDef] [FunDef] deriving Show
|
||||||
|
abstrName (Abstract mn _ _ _) = mn
|
||||||
|
|
||||||
|
data CatDef = CatDef CatId [CatId] deriving Show
|
||||||
|
data FunDef = FunDef FunId Type deriving Show
|
||||||
|
data Type = Type [TypeBinding] TypeApp deriving Show
|
||||||
|
data TypeApp = TypeApp CatId [Type] deriving Show
|
||||||
|
|
||||||
|
data TypeBinding = TypeBinding VarId Type deriving Show
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Concreate syntax
|
||||||
|
|
||||||
|
-- | Concrete Syntax
|
||||||
|
data Concrete = Concrete ModId ModId Flags [ParamDef] [LincatDef] [LinDef]
|
||||||
|
deriving Show
|
||||||
|
concName (Concrete cnc _ _ _ _ _) = cnc
|
||||||
|
|
||||||
|
data ParamDef = ParamDef ParamId [ParamValueDef]
|
||||||
|
| ParamAliasDef ParamId LinType
|
||||||
|
deriving Show
|
||||||
|
data LincatDef = LincatDef CatId LinType deriving Show
|
||||||
|
data LinDef = LinDef FunId [VarId] LinValue deriving Show
|
||||||
|
|
||||||
|
-- | Linearization type, RHS of @lincat@
|
||||||
|
data LinType = FloatType
|
||||||
|
| IntType
|
||||||
|
| ParamType ParamType
|
||||||
|
| RecordType [RecordRowType]
|
||||||
|
| StrType
|
||||||
|
| TableType LinType LinType
|
||||||
|
| TupleType [LinType]
|
||||||
|
deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
newtype ParamType = ParamTypeId ParamId deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
-- | Linearization value, RHS of @lin@
|
||||||
|
data LinValue = ConcatValue LinValue LinValue
|
||||||
|
| LiteralValue LinLiteral
|
||||||
|
| ErrorValue String
|
||||||
|
| ParamConstant ParamValue
|
||||||
|
| PredefValue PredefId
|
||||||
|
| RecordValue [RecordRowValue]
|
||||||
|
| TableValue LinType [TableRowValue]
|
||||||
|
--- | VTableValue LinType [LinValue]
|
||||||
|
| TupleValue [LinValue]
|
||||||
|
| VariantValue [LinValue]
|
||||||
|
| VarValue VarValueId
|
||||||
|
| PreValue [([String], LinValue)] LinValue
|
||||||
|
| Projection LinValue LabelId
|
||||||
|
| Selection LinValue LinValue
|
||||||
|
| CommentedValue String LinValue
|
||||||
|
deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
data LinLiteral = FloatConstant Float
|
||||||
|
| IntConstant Int
|
||||||
|
| StrConstant String
|
||||||
|
deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
data LinPattern = ParamPattern ParamPattern
|
||||||
|
| RecordPattern [RecordRow LinPattern]
|
||||||
|
| TuplePattern [LinPattern]
|
||||||
|
| WildPattern
|
||||||
|
deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
type ParamValue = Param LinValue
|
||||||
|
type ParamPattern = Param LinPattern
|
||||||
|
type ParamValueDef = Param ParamId
|
||||||
|
|
||||||
|
data Param arg = Param ParamId [arg]
|
||||||
|
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
|
||||||
|
|
||||||
|
type RecordRowType = RecordRow LinType
|
||||||
|
type RecordRowValue = RecordRow LinValue
|
||||||
|
type TableRowValue = TableRow LinValue
|
||||||
|
|
||||||
|
data RecordRow rhs = RecordRow LabelId rhs
|
||||||
|
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
|
||||||
|
data TableRow rhs = TableRow LinPattern rhs
|
||||||
|
deriving (Eq,Ord,Show,Functor,Foldable,Traversable)
|
||||||
|
|
||||||
|
-- *** Identifiers in Concrete Syntax
|
||||||
|
|
||||||
|
newtype PredefId = PredefId Id deriving (Eq,Ord,Show)
|
||||||
|
newtype LabelId = LabelId Id deriving (Eq,Ord,Show)
|
||||||
|
data VarValueId = VarValueId QualId deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
-- | Name of param type or param value
|
||||||
|
newtype ParamId = ParamId QualId deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Used in both Abstract and Concrete Syntax
|
||||||
|
|
||||||
|
newtype ModId = ModId Id deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
newtype CatId = CatId Id deriving (Eq,Ord,Show)
|
||||||
|
newtype FunId = FunId Id deriving (Eq,Show)
|
||||||
|
|
||||||
|
data VarId = Anonymous | VarId Id deriving Show
|
||||||
|
|
||||||
|
newtype Flags = Flags [(FlagName,FlagValue)] deriving Show
|
||||||
|
type FlagName = Id
|
||||||
|
data FlagValue = Str String | Int Int | Flt Double deriving Show
|
||||||
|
|
||||||
|
|
||||||
|
-- *** Identifiers
|
||||||
|
|
||||||
|
type Id = String
|
||||||
|
data QualId = Qual ModId Id | Unqual Id deriving (Eq,Ord,Show)
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Pretty printing
|
||||||
|
|
||||||
|
instance Pretty Grammar where
|
||||||
|
pp (Grammar abs cncs) = abs $+$ vcat cncs
|
||||||
|
|
||||||
|
instance Pretty Abstract where
|
||||||
|
pp (Abstract m flags cats funs) =
|
||||||
|
"abstract" <+> m <+> "=" <+> "{" $$
|
||||||
|
flags $$
|
||||||
|
"cat" <+> fsep cats $$
|
||||||
|
"fun" <+> vcat funs $$
|
||||||
|
"}"
|
||||||
|
|
||||||
|
instance Pretty CatDef where
|
||||||
|
pp (CatDef c cs) = hsep (c:cs)<>";"
|
||||||
|
|
||||||
|
instance Pretty FunDef where
|
||||||
|
pp (FunDef f ty) = f <+> ":" <+> ty <>";"
|
||||||
|
|
||||||
|
instance Pretty Type where
|
||||||
|
pp (Type bs ty) = sep (punctuate " ->" (map pp bs ++ [pp ty]))
|
||||||
|
|
||||||
|
instance PPA Type where
|
||||||
|
ppA (Type [] (TypeApp c [])) = pp c
|
||||||
|
ppA t = parens t
|
||||||
|
|
||||||
|
instance Pretty TypeBinding where
|
||||||
|
pp (TypeBinding Anonymous (Type [] tapp)) = pp tapp
|
||||||
|
pp (TypeBinding Anonymous ty) = parens ty
|
||||||
|
pp (TypeBinding (VarId x) ty) = parens (x<+>":"<+>ty)
|
||||||
|
|
||||||
|
instance Pretty TypeApp where
|
||||||
|
pp (TypeApp c targs) = c<+>hsep (map ppA targs)
|
||||||
|
|
||||||
|
instance Pretty VarId where
|
||||||
|
pp Anonymous = pp "_"
|
||||||
|
pp (VarId x) = pp x
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
instance Pretty Concrete where
|
||||||
|
pp (Concrete cncid absid flags params lincats lins) =
|
||||||
|
"concrete" <+> cncid <+> "of" <+> absid <+> "=" <+> "{" $$
|
||||||
|
vcat params $$
|
||||||
|
section "lincat" lincats $$
|
||||||
|
section "lin" lins $$
|
||||||
|
"}"
|
||||||
|
where
|
||||||
|
section name [] = empty
|
||||||
|
section name ds = name <+> vcat (map (<> ";") ds)
|
||||||
|
|
||||||
|
instance Pretty ParamDef where
|
||||||
|
pp (ParamDef p pvs) = hang ("param"<+> p <+> "=") 4 (punctuate " |" pvs)<>";"
|
||||||
|
pp (ParamAliasDef p t) = hang ("oper"<+> p <+> "=") 4 t<>";"
|
||||||
|
|
||||||
|
instance PPA arg => Pretty (Param arg) where
|
||||||
|
pp (Param p ps) = pp p<+>sep (map ppA ps)
|
||||||
|
|
||||||
|
instance PPA arg => PPA (Param arg) where
|
||||||
|
ppA (Param p []) = pp p
|
||||||
|
ppA pv = parens pv
|
||||||
|
|
||||||
|
instance Pretty LincatDef where
|
||||||
|
pp (LincatDef c lt) = hang (c <+> "=") 4 lt
|
||||||
|
|
||||||
|
instance Pretty LinType where
|
||||||
|
pp lt = case lt of
|
||||||
|
FloatType -> pp "Float"
|
||||||
|
IntType -> pp "Int"
|
||||||
|
ParamType pt -> pp pt
|
||||||
|
RecordType rs -> block rs
|
||||||
|
StrType -> pp "Str"
|
||||||
|
TableType pt lt -> sep [pt <+> "=>",pp lt]
|
||||||
|
TupleType lts -> "<"<>punctuate "," lts<>">"
|
||||||
|
|
||||||
|
instance RhsSeparator LinType where rhsSep _ = pp ":"
|
||||||
|
|
||||||
|
instance Pretty ParamType where
|
||||||
|
pp (ParamTypeId p) = pp p
|
||||||
|
|
||||||
|
instance Pretty LinDef where
|
||||||
|
pp (LinDef f xs lv) = hang (f<+>hsep xs<+>"=") 4 lv
|
||||||
|
|
||||||
|
instance Pretty LinValue where
|
||||||
|
pp lv = case lv of
|
||||||
|
ConcatValue v1 v2 -> sep [v1 <+> "++",pp v2]
|
||||||
|
ErrorValue s -> "Predef.error"<+>doubleQuotes s
|
||||||
|
ParamConstant pv -> pp pv
|
||||||
|
Projection lv l -> ppA lv<>"."<>l
|
||||||
|
Selection tv pv -> ppA tv<>"!"<>ppA pv
|
||||||
|
VariantValue vs -> "variants"<+>block vs
|
||||||
|
CommentedValue s v -> "{-" <+> s <+> "-}" $$ v
|
||||||
|
_ -> ppA lv
|
||||||
|
|
||||||
|
instance PPA LinValue where
|
||||||
|
ppA lv = case lv of
|
||||||
|
LiteralValue l -> ppA l
|
||||||
|
ParamConstant pv -> ppA pv
|
||||||
|
PredefValue p -> ppA p
|
||||||
|
RecordValue [] -> pp "<>"
|
||||||
|
RecordValue rvs -> block rvs
|
||||||
|
PreValue alts def ->
|
||||||
|
"pre"<+>block (map alt alts++["_"<+>"=>"<+>def])
|
||||||
|
where
|
||||||
|
alt (ss,lv) = hang (hcat (punctuate "|" (map doubleQuotes ss)))
|
||||||
|
2 ("=>"<+>lv)
|
||||||
|
TableValue _ tvs -> "table"<+>block tvs
|
||||||
|
-- VTableValue t ts -> "table"<+>t<+>brackets (semiSep ts)
|
||||||
|
TupleValue lvs -> "<"<>punctuate "," lvs<>">"
|
||||||
|
VarValue v -> pp v
|
||||||
|
_ -> parens lv
|
||||||
|
|
||||||
|
instance Pretty LinLiteral where pp = ppA
|
||||||
|
|
||||||
|
instance PPA LinLiteral where
|
||||||
|
ppA l = case l of
|
||||||
|
FloatConstant f -> pp f
|
||||||
|
IntConstant n -> pp n
|
||||||
|
StrConstant s -> doubleQuotes s -- hmm
|
||||||
|
|
||||||
|
instance RhsSeparator LinValue where rhsSep _ = pp "="
|
||||||
|
|
||||||
|
instance Pretty LinPattern where
|
||||||
|
pp p =
|
||||||
|
case p of
|
||||||
|
ParamPattern pv -> pp pv
|
||||||
|
_ -> ppA p
|
||||||
|
|
||||||
|
instance PPA LinPattern where
|
||||||
|
ppA p =
|
||||||
|
case p of
|
||||||
|
ParamPattern pv -> ppA pv
|
||||||
|
RecordPattern r -> block r
|
||||||
|
TuplePattern ps -> "<"<>punctuate "," ps<>">"
|
||||||
|
WildPattern -> pp "_"
|
||||||
|
_ -> parens p
|
||||||
|
|
||||||
|
instance RhsSeparator LinPattern where rhsSep _ = pp "="
|
||||||
|
|
||||||
|
instance RhsSeparator rhs => Pretty (RecordRow rhs) where
|
||||||
|
pp (RecordRow l v) = hang (l<+>rhsSep v) 2 v
|
||||||
|
|
||||||
|
instance Pretty rhs => Pretty (TableRow rhs) where
|
||||||
|
pp (TableRow l v) = hang (l<+>"=>") 2 v
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
instance Pretty ModId where pp (ModId s) = pp s
|
||||||
|
instance Pretty CatId where pp (CatId s) = pp s
|
||||||
|
instance Pretty FunId where pp (FunId s) = pp s
|
||||||
|
instance Pretty LabelId where pp (LabelId s) = pp s
|
||||||
|
instance Pretty PredefId where pp = ppA
|
||||||
|
instance PPA PredefId where ppA (PredefId s) = "Predef."<>s
|
||||||
|
instance Pretty ParamId where pp = ppA
|
||||||
|
instance PPA ParamId where ppA (ParamId s) = pp s
|
||||||
|
instance Pretty VarValueId where pp (VarValueId s) = pp s
|
||||||
|
|
||||||
|
instance Pretty QualId where pp = ppA
|
||||||
|
|
||||||
|
instance PPA QualId where
|
||||||
|
ppA (Qual m n) = m<>"_"<>n -- hmm
|
||||||
|
ppA (Unqual n) = pp n
|
||||||
|
|
||||||
|
instance Pretty Flags where
|
||||||
|
pp (Flags []) = empty
|
||||||
|
pp (Flags flags) = "flags" <+> vcat (map ppFlag flags)
|
||||||
|
where
|
||||||
|
ppFlag (name,value) = name <+> "=" <+> value <>";"
|
||||||
|
|
||||||
|
instance Pretty FlagValue where
|
||||||
|
pp (Str s) = pp s
|
||||||
|
pp (Int i) = pp i
|
||||||
|
pp (Flt d) = pp d
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- | Pretty print atomically (i.e. wrap it in parentheses if necessary)
|
||||||
|
class Pretty a => PPA a where ppA :: a -> Doc
|
||||||
|
|
||||||
|
class Pretty rhs => RhsSeparator rhs where rhsSep :: rhs -> Doc
|
||||||
|
|
||||||
|
semiSep xs = punctuate ";" xs
|
||||||
|
block xs = braces (semiSep xs)
|
||||||
289
src/compiler/GF/Grammar/CanonicalJSON.hs
Normal file
289
src/compiler/GF/Grammar/CanonicalJSON.hs
Normal file
@@ -0,0 +1,289 @@
|
|||||||
|
module GF.Grammar.CanonicalJSON (
|
||||||
|
encodeJSON
|
||||||
|
) where
|
||||||
|
|
||||||
|
import Text.JSON
|
||||||
|
import Control.Applicative ((<|>))
|
||||||
|
import Data.Ratio (denominator, numerator)
|
||||||
|
import GF.Grammar.Canonical
|
||||||
|
|
||||||
|
|
||||||
|
encodeJSON :: FilePath -> Grammar -> IO ()
|
||||||
|
encodeJSON fpath g = writeFile fpath (encode g)
|
||||||
|
|
||||||
|
|
||||||
|
-- in general we encode grammars using JSON objects/records,
|
||||||
|
-- except for newtypes/coercions/direct values
|
||||||
|
|
||||||
|
-- the top-level definitions use normal record labels,
|
||||||
|
-- but recursive types/values/ids use labels staring with a "."
|
||||||
|
|
||||||
|
instance JSON Grammar where
|
||||||
|
showJSON (Grammar abs cncs) = makeObj [("abstract", showJSON abs), ("concretes", showJSON cncs)]
|
||||||
|
|
||||||
|
readJSON o = Grammar <$> o!"abstract" <*> o!"concretes"
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Abstract Syntax
|
||||||
|
|
||||||
|
instance JSON Abstract where
|
||||||
|
showJSON (Abstract absid flags cats funs)
|
||||||
|
= makeObj [("abs", showJSON absid),
|
||||||
|
("flags", showJSON flags),
|
||||||
|
("cats", showJSON cats),
|
||||||
|
("funs", showJSON funs)]
|
||||||
|
|
||||||
|
readJSON o = Abstract
|
||||||
|
<$> o!"abs"
|
||||||
|
<*>(o!"flags" <|> return (Flags []))
|
||||||
|
<*> o!"cats"
|
||||||
|
<*> o!"funs"
|
||||||
|
|
||||||
|
instance JSON CatDef where
|
||||||
|
-- non-dependent categories are encoded as simple strings:
|
||||||
|
showJSON (CatDef c []) = showJSON c
|
||||||
|
showJSON (CatDef c cs) = makeObj [("cat", showJSON c), ("args", showJSON cs)]
|
||||||
|
|
||||||
|
readJSON o = CatDef <$> readJSON o <*> return []
|
||||||
|
<|> CatDef <$> o!"cat" <*> o!"args"
|
||||||
|
|
||||||
|
instance JSON FunDef where
|
||||||
|
showJSON (FunDef f ty) = makeObj [("fun", showJSON f), ("type", showJSON ty)]
|
||||||
|
|
||||||
|
readJSON o = FunDef <$> o!"fun" <*> o!"type"
|
||||||
|
|
||||||
|
instance JSON Type where
|
||||||
|
showJSON (Type bs ty) = makeObj [(".args", showJSON bs), (".result", showJSON ty)]
|
||||||
|
|
||||||
|
readJSON o = Type <$> o!".args" <*> o!".result"
|
||||||
|
|
||||||
|
instance JSON TypeApp where
|
||||||
|
-- non-dependent categories are encoded as simple strings:
|
||||||
|
showJSON (TypeApp c []) = showJSON c
|
||||||
|
showJSON (TypeApp c args) = makeObj [(".cat", showJSON c), (".args", showJSON args)]
|
||||||
|
|
||||||
|
readJSON o = TypeApp <$> readJSON o <*> return []
|
||||||
|
<|> TypeApp <$> o!".cat" <*> o!".args"
|
||||||
|
|
||||||
|
instance JSON TypeBinding where
|
||||||
|
-- non-dependent categories are encoded as simple strings:
|
||||||
|
showJSON (TypeBinding Anonymous (Type [] (TypeApp c []))) = showJSON c
|
||||||
|
showJSON (TypeBinding x ty) = makeObj [(".var", showJSON x), (".type", showJSON ty)]
|
||||||
|
|
||||||
|
readJSON o = do c <- readJSON o
|
||||||
|
return (TypeBinding Anonymous (Type [] (TypeApp c [])))
|
||||||
|
<|> TypeBinding <$> o!".var" <*> o!".type"
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Concrete syntax
|
||||||
|
|
||||||
|
instance JSON Concrete where
|
||||||
|
showJSON (Concrete cncid absid flags params lincats lins)
|
||||||
|
= makeObj [("cnc", showJSON cncid),
|
||||||
|
("abs", showJSON absid),
|
||||||
|
("flags", showJSON flags),
|
||||||
|
("params", showJSON params),
|
||||||
|
("lincats", showJSON lincats),
|
||||||
|
("lins", showJSON lins)]
|
||||||
|
|
||||||
|
readJSON o = Concrete
|
||||||
|
<$> o!"cnc"
|
||||||
|
<*> o!"abs"
|
||||||
|
<*>(o!"flags" <|> return (Flags []))
|
||||||
|
<*> o!"params"
|
||||||
|
<*> o!"lincats"
|
||||||
|
<*> o!"lins"
|
||||||
|
|
||||||
|
instance JSON ParamDef where
|
||||||
|
showJSON (ParamDef p pvs) = makeObj [("param", showJSON p), ("values", showJSON pvs)]
|
||||||
|
showJSON (ParamAliasDef p t) = makeObj [("param", showJSON p), ("alias", showJSON t)]
|
||||||
|
|
||||||
|
readJSON o = ParamDef <$> o!"param" <*> o!"values"
|
||||||
|
<|> ParamAliasDef <$> o!"param" <*> o!"alias"
|
||||||
|
|
||||||
|
instance JSON LincatDef where
|
||||||
|
showJSON (LincatDef c lt) = makeObj [("cat", showJSON c), ("lintype", showJSON lt)]
|
||||||
|
|
||||||
|
readJSON o = LincatDef <$> o!"cat" <*> o!"lintype"
|
||||||
|
|
||||||
|
instance JSON LinDef where
|
||||||
|
showJSON (LinDef f xs lv) = makeObj [("fun", showJSON f), ("args", showJSON xs), ("lin", showJSON lv)]
|
||||||
|
|
||||||
|
readJSON o = LinDef <$> o!"fun" <*> o!"args" <*> o!"lin"
|
||||||
|
|
||||||
|
instance JSON LinType where
|
||||||
|
-- the basic types (Str, Float, Int) are encoded as strings:
|
||||||
|
showJSON (StrType) = showJSON "Str"
|
||||||
|
showJSON (FloatType) = showJSON "Float"
|
||||||
|
showJSON (IntType) = showJSON "Int"
|
||||||
|
-- parameters are also encoded as strings:
|
||||||
|
showJSON (ParamType pt) = showJSON pt
|
||||||
|
-- tables/tuples are encoded as JSON objects:
|
||||||
|
showJSON (TableType pt lt) = makeObj [(".tblarg", showJSON pt), (".tblval", showJSON lt)]
|
||||||
|
showJSON (TupleType lts) = makeObj [(".tuple", showJSON lts)]
|
||||||
|
-- records are encoded as records:
|
||||||
|
showJSON (RecordType rows) = showJSON rows
|
||||||
|
|
||||||
|
readJSON o = do "Str" <- readJSON o; return StrType
|
||||||
|
<|> do "Float" <- readJSON o; return FloatType
|
||||||
|
<|> do "Int" <- readJSON o; return IntType
|
||||||
|
<|> do ptype <- readJSON o; return (ParamType ptype)
|
||||||
|
<|> TableType <$> o!".tblarg" <*> o!".tblval"
|
||||||
|
<|> TupleType <$> o!".tuple"
|
||||||
|
<|> RecordType <$> readJSON o
|
||||||
|
|
||||||
|
instance JSON LinValue where
|
||||||
|
showJSON (LiteralValue l ) = showJSON l
|
||||||
|
-- most values are encoded as JSON objects:
|
||||||
|
showJSON (ParamConstant pv) = makeObj [(".param", showJSON pv)]
|
||||||
|
showJSON (PredefValue p ) = makeObj [(".predef", showJSON p)]
|
||||||
|
showJSON (TableValue t tvs) = makeObj [(".tblarg", showJSON t), (".tblrows", showJSON tvs)]
|
||||||
|
showJSON (TupleValue lvs) = makeObj [(".tuple", showJSON lvs)]
|
||||||
|
showJSON (VarValue v ) = makeObj [(".var", showJSON v)]
|
||||||
|
showJSON (ErrorValue s ) = makeObj [(".error", showJSON s)]
|
||||||
|
showJSON (Projection lv l ) = makeObj [(".project", showJSON lv), (".label", showJSON l)]
|
||||||
|
showJSON (Selection tv pv) = makeObj [(".select", showJSON tv), (".key", showJSON pv)]
|
||||||
|
showJSON (VariantValue vs) = makeObj [(".variants", showJSON vs)]
|
||||||
|
showJSON (PreValue pre def) = makeObj [(".pre", showJSON pre),(".default", showJSON def)]
|
||||||
|
-- records are encoded directly as JSON records:
|
||||||
|
showJSON (RecordValue rows) = showJSON rows
|
||||||
|
-- concatenation is encoded as a JSON array:
|
||||||
|
showJSON v@(ConcatValue _ _) = showJSON (flatten v [])
|
||||||
|
where flatten (ConcatValue v v') = flatten v . flatten v'
|
||||||
|
flatten v = (v :)
|
||||||
|
|
||||||
|
readJSON o = LiteralValue <$> readJSON o
|
||||||
|
<|> ParamConstant <$> o!".param"
|
||||||
|
<|> PredefValue <$> o!".predef"
|
||||||
|
<|> TableValue <$> o!".tblarg" <*> o!".tblrows"
|
||||||
|
<|> TupleValue <$> o!".tuple"
|
||||||
|
<|> VarValue <$> o!".var"
|
||||||
|
<|> ErrorValue <$> o!".error"
|
||||||
|
<|> Projection <$> o!".project" <*> o!".label"
|
||||||
|
<|> Selection <$> o!".select" <*> o!".key"
|
||||||
|
<|> VariantValue <$> o!".variants"
|
||||||
|
<|> PreValue <$> o!".pre" <*> o!".default"
|
||||||
|
<|> RecordValue <$> readJSON o
|
||||||
|
<|> do vs <- readJSON o :: Result [LinValue]
|
||||||
|
return (foldr1 ConcatValue vs)
|
||||||
|
|
||||||
|
instance JSON LinLiteral where
|
||||||
|
-- basic values (Str, Float, Int) are encoded as JSON strings/numbers:
|
||||||
|
showJSON (StrConstant s) = showJSON s
|
||||||
|
showJSON (FloatConstant f) = showJSON f
|
||||||
|
showJSON (IntConstant n) = showJSON n
|
||||||
|
|
||||||
|
readJSON = readBasicJSON StrConstant IntConstant FloatConstant
|
||||||
|
|
||||||
|
instance JSON LinPattern where
|
||||||
|
-- wildcards and patterns without arguments are encoded as strings:
|
||||||
|
showJSON (WildPattern) = showJSON "_"
|
||||||
|
showJSON (ParamPattern (Param p [])) = showJSON p
|
||||||
|
-- complex patterns are encoded as JSON objects:
|
||||||
|
showJSON (ParamPattern pv) = showJSON pv
|
||||||
|
-- and records as records:
|
||||||
|
showJSON (RecordPattern r) = showJSON r
|
||||||
|
|
||||||
|
readJSON o = do "_" <- readJSON o; return WildPattern
|
||||||
|
<|> do p <- readJSON o; return (ParamPattern (Param p []))
|
||||||
|
<|> ParamPattern <$> readJSON o
|
||||||
|
<|> RecordPattern <$> readJSON o
|
||||||
|
|
||||||
|
instance JSON arg => JSON (Param arg) where
|
||||||
|
-- parameters without arguments are encoded as strings:
|
||||||
|
showJSON (Param p []) = showJSON p
|
||||||
|
showJSON (Param p args) = makeObj [(".paramid", showJSON p), (".args", showJSON args)]
|
||||||
|
|
||||||
|
readJSON o = Param <$> readJSON o <*> return []
|
||||||
|
<|> Param <$> o!".paramid" <*> o!".args"
|
||||||
|
|
||||||
|
instance JSON a => JSON (RecordRow a) where
|
||||||
|
-- record rows and lists of record rows are both encoded as JSON records (i.e., objects)
|
||||||
|
showJSON row = showJSONs [row]
|
||||||
|
showJSONs rows = makeObj (map toRow rows)
|
||||||
|
where toRow (RecordRow (LabelId lbl) val) = (lbl, showJSON val)
|
||||||
|
|
||||||
|
readJSON obj = head <$> readJSONs obj
|
||||||
|
readJSONs obj = mapM fromRow (assocsJSObject obj)
|
||||||
|
where fromRow (lbl, jsvalue) = do value <- readJSON jsvalue
|
||||||
|
return (RecordRow (LabelId lbl) value)
|
||||||
|
|
||||||
|
instance JSON rhs => JSON (TableRow rhs) where
|
||||||
|
showJSON (TableRow l v) = makeObj [(".pattern", showJSON l), (".value", showJSON v)]
|
||||||
|
|
||||||
|
readJSON o = TableRow <$> o!".pattern" <*> o!".value"
|
||||||
|
|
||||||
|
|
||||||
|
-- *** Identifiers in Concrete Syntax
|
||||||
|
|
||||||
|
instance JSON PredefId where showJSON (PredefId s) = showJSON s ; readJSON = fmap PredefId . readJSON
|
||||||
|
instance JSON LabelId where showJSON (LabelId s) = showJSON s ; readJSON = fmap LabelId . readJSON
|
||||||
|
instance JSON VarValueId where showJSON (VarValueId s) = showJSON s ; readJSON = fmap VarValueId . readJSON
|
||||||
|
instance JSON ParamId where showJSON (ParamId s) = showJSON s ; readJSON = fmap ParamId . readJSON
|
||||||
|
instance JSON ParamType where showJSON (ParamTypeId s) = showJSON s ; readJSON = fmap ParamTypeId . readJSON
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Used in both Abstract and Concrete Syntax
|
||||||
|
|
||||||
|
instance JSON ModId where showJSON (ModId s) = showJSON s ; readJSON = fmap ModId . readJSON
|
||||||
|
instance JSON CatId where showJSON (CatId s) = showJSON s ; readJSON = fmap CatId . readJSON
|
||||||
|
instance JSON FunId where showJSON (FunId s) = showJSON s ; readJSON = fmap FunId . readJSON
|
||||||
|
|
||||||
|
instance JSON VarId where
|
||||||
|
-- the anonymous variable is the underscore:
|
||||||
|
showJSON Anonymous = showJSON "_"
|
||||||
|
showJSON (VarId x) = showJSON x
|
||||||
|
|
||||||
|
readJSON o = do "_" <- readJSON o; return Anonymous
|
||||||
|
<|> VarId <$> readJSON o
|
||||||
|
|
||||||
|
instance JSON QualId where
|
||||||
|
showJSON (Qual (ModId m) n) = showJSON (m++"."++n)
|
||||||
|
showJSON (Unqual n) = showJSON n
|
||||||
|
|
||||||
|
readJSON o = do qualid <- readJSON o
|
||||||
|
let (mod, id) = span (/= '.') qualid
|
||||||
|
return $ if null mod then Unqual id else Qual (ModId mod) id
|
||||||
|
|
||||||
|
instance JSON Flags where
|
||||||
|
-- flags are encoded directly as JSON records (i.e., objects):
|
||||||
|
showJSON (Flags fs) = makeObj [(f, showJSON v) | (f, v) <- fs]
|
||||||
|
|
||||||
|
readJSON obj = Flags <$> mapM fromRow (assocsJSObject obj)
|
||||||
|
where fromRow (lbl, jsvalue) = do value <- readJSON jsvalue
|
||||||
|
return (lbl, value)
|
||||||
|
|
||||||
|
instance JSON FlagValue where
|
||||||
|
-- flag values are encoded as basic JSON types:
|
||||||
|
showJSON (Str s) = showJSON s
|
||||||
|
showJSON (Int i) = showJSON i
|
||||||
|
showJSON (Flt f) = showJSON f
|
||||||
|
|
||||||
|
readJSON = readBasicJSON Str Int Flt
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- ** Convenience functions
|
||||||
|
|
||||||
|
(!) :: JSON a => JSValue -> String -> Result a
|
||||||
|
obj ! key = maybe (fail $ "CanonicalJSON.(!): Could not find key: " ++ show key)
|
||||||
|
readJSON
|
||||||
|
(lookup key (assocsJSObject obj))
|
||||||
|
|
||||||
|
assocsJSObject :: JSValue -> [(String, JSValue)]
|
||||||
|
assocsJSObject (JSObject o) = fromJSObject o
|
||||||
|
assocsJSObject (JSArray _) = fail $ "CanonicalJSON.assocsJSObject: Expected a JSON object, found an Array"
|
||||||
|
assocsJSObject jsvalue = fail $ "CanonicalJSON.assocsJSObject: Expected a JSON object, found " ++ show jsvalue
|
||||||
|
|
||||||
|
|
||||||
|
readBasicJSON :: (JSON int, Integral int, JSON flt, RealFloat flt) =>
|
||||||
|
(String -> v) -> (int -> v) -> (flt -> v) -> JSValue -> Result v
|
||||||
|
readBasicJSON str int flt o
|
||||||
|
= str <$> readJSON o
|
||||||
|
<|> int_or_flt <$> readJSON o
|
||||||
|
where int_or_flt f | f == fromIntegral n = int n
|
||||||
|
| otherwise = flt f
|
||||||
|
where n = round f
|
||||||
@@ -209,7 +209,7 @@ ppTerm q d (S x y) = case x of
|
|||||||
ppTerm q d (ExtR x y) = prec d 3 (ppTerm q 3 x <+> "**" <+> ppTerm q 4 y)
|
ppTerm q d (ExtR x y) = prec d 3 (ppTerm q 3 x <+> "**" <+> ppTerm q 4 y)
|
||||||
ppTerm q d (App x y) = prec d 4 (ppTerm q 4 x <+> ppTerm q 5 y)
|
ppTerm q d (App x y) = prec d 4 (ppTerm q 4 x <+> ppTerm q 5 y)
|
||||||
ppTerm q d (V e es) = hang "table" 2 (sep [ppTerm q 6 e,brackets (fsep (punctuate ';' (map (ppTerm q 0) es)))])
|
ppTerm q d (V e es) = hang "table" 2 (sep [ppTerm q 6 e,brackets (fsep (punctuate ';' (map (ppTerm q 0) es)))])
|
||||||
ppTerm q d (FV es) = "variants" <+> braces (fsep (punctuate ';' (map (ppTerm q 0) es)))
|
ppTerm q d (FV es) = prec d 4 ("variants" <+> braces (fsep (punctuate ';' (map (ppTerm q 0) es))))
|
||||||
ppTerm q d (AdHocOverload es) = "overload" <+> braces (fsep (punctuate ';' (map (ppTerm q 0) es)))
|
ppTerm q d (AdHocOverload es) = "overload" <+> braces (fsep (punctuate ';' (map (ppTerm q 0) es)))
|
||||||
ppTerm q d (Alts e xs) = prec d 4 ("pre" <+> braces (ppTerm q 0 e <> ';' <+> fsep (punctuate ';' (map (ppAltern q) xs))))
|
ppTerm q d (Alts e xs) = prec d 4 ("pre" <+> braces (ppTerm q 0 e <> ';' <+> fsep (punctuate ';' (map (ppAltern q) xs))))
|
||||||
ppTerm q d (Strs es) = "strs" <+> braces (fsep (punctuate ';' (map (ppTerm q 0) es)))
|
ppTerm q d (Strs es) = "strs" <+> braces (fsep (punctuate ';' (map (ppTerm q 0) es)))
|
||||||
|
|||||||
@@ -40,6 +40,9 @@ tvar = TId
|
|||||||
tcon0 = TId
|
tcon0 = TId
|
||||||
tcon c = foldl TAp (TId c)
|
tcon c = foldl TAp (TId c)
|
||||||
|
|
||||||
|
lets [] e = e
|
||||||
|
lets ds e = Lets ds e
|
||||||
|
|
||||||
let1 x xe e = Lets [(x,xe)] e
|
let1 x xe e = Lets [(x,xe)] e
|
||||||
single x = List [x]
|
single x = List [x]
|
||||||
|
|
||||||
@@ -113,7 +116,8 @@ instance Pretty Exp where
|
|||||||
Op e1 op e2 -> hang (ppB e1<+>op) 2 (ppB e2)
|
Op e1 op e2 -> hang (ppB e1<+>op) 2 (ppB e2)
|
||||||
Lets bs e -> sep ["let"<+>vcat [hang (x<+>"=") 2 xe|(x,xe)<-bs],
|
Lets bs e -> sep ["let"<+>vcat [hang (x<+>"=") 2 xe|(x,xe)<-bs],
|
||||||
"in" <+>e]
|
"in" <+>e]
|
||||||
LambdaCase alts -> hang "\\case" 4 (vcat [p<+>"->"<+>e|(p,e)<-alts])
|
LambdaCase alts ->
|
||||||
|
hang "\\case" 2 (vcat [hang (p<+>"->") 2 e|(p,e)<-alts])
|
||||||
_ -> ppB e
|
_ -> ppB e
|
||||||
|
|
||||||
ppB e = case flatAp e of f:as -> hang (ppA f) 2 (sep (map ppA as))
|
ppB e = case flatAp e of f:as -> hang (ppA f) 2 (sep (map ppA as))
|
||||||
|
|||||||
@@ -87,7 +87,10 @@ data Phase = Preproc | Convert | Compile | Link
|
|||||||
deriving (Show,Eq,Ord)
|
deriving (Show,Eq,Ord)
|
||||||
|
|
||||||
data OutputFormat = FmtPGFPretty
|
data OutputFormat = FmtPGFPretty
|
||||||
|
| FmtCanonicalGF
|
||||||
|
| FmtCanonicalJson
|
||||||
| FmtJavaScript
|
| FmtJavaScript
|
||||||
|
| FmtJSON
|
||||||
| FmtPython
|
| FmtPython
|
||||||
| FmtHaskell
|
| FmtHaskell
|
||||||
| FmtJava
|
| FmtJava
|
||||||
@@ -325,7 +328,8 @@ optDescr =
|
|||||||
Option [] ["gfo-dir"] (ReqArg gfoDir "DIR") "Directory to put .gfo files in (default = '.').",
|
Option [] ["gfo-dir"] (ReqArg gfoDir "DIR") "Directory to put .gfo files in (default = '.').",
|
||||||
Option ['f'] ["output-format"] (ReqArg outFmt "FMT")
|
Option ['f'] ["output-format"] (ReqArg outFmt "FMT")
|
||||||
(unlines ["Output format. FMT can be one of:",
|
(unlines ["Output format. FMT can be one of:",
|
||||||
"Multiple concrete: pgf (default), js, pgf_pretty, prolog, python, ...", -- gar,
|
"Canonical GF grammar: canonical_gf, canonical_json, (and haskell with option --haskell=concrete)",
|
||||||
|
"Multiple concrete: pgf (default), json, js, pgf_pretty, prolog, python, ...", -- gar,
|
||||||
"Single concrete only: bnf, ebnf, fa, gsl, jsgf, regexp, slf, srgs_xml, srgs_abnf, vxml, ....", -- cf, lbnf,
|
"Single concrete only: bnf, ebnf, fa, gsl, jsgf, regexp, slf, srgs_xml, srgs_abnf, vxml, ....", -- cf, lbnf,
|
||||||
"Abstract only: haskell, ..."]), -- prolog_abs,
|
"Abstract only: haskell, ..."]), -- prolog_abs,
|
||||||
Option [] ["sisr"] (ReqArg sisrFmt "FMT")
|
Option [] ["sisr"] (ReqArg sisrFmt "FMT")
|
||||||
@@ -468,7 +472,10 @@ outputFormats = map fst outputFormatsExpl
|
|||||||
outputFormatsExpl :: [((String,OutputFormat),String)]
|
outputFormatsExpl :: [((String,OutputFormat),String)]
|
||||||
outputFormatsExpl =
|
outputFormatsExpl =
|
||||||
[(("pgf_pretty", FmtPGFPretty),"human-readable pgf"),
|
[(("pgf_pretty", FmtPGFPretty),"human-readable pgf"),
|
||||||
|
(("canonical_gf", FmtCanonicalGF),"Canonical GF source files"),
|
||||||
|
(("canonical_json", FmtCanonicalJson),"Canonical JSON source files"),
|
||||||
(("js", FmtJavaScript),"JavaScript (whole grammar)"),
|
(("js", FmtJavaScript),"JavaScript (whole grammar)"),
|
||||||
|
(("json", FmtJSON),"JSON (whole grammar)"),
|
||||||
(("python", FmtPython),"Python (whole grammar)"),
|
(("python", FmtPython),"Python (whole grammar)"),
|
||||||
(("haskell", FmtHaskell),"Haskell (abstract syntax)"),
|
(("haskell", FmtHaskell),"Haskell (abstract syntax)"),
|
||||||
(("java", FmtJava),"Java (abstract syntax)"),
|
(("java", FmtJava),"Java (abstract syntax)"),
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ import GF.System.Console (setConsoleEncoding)
|
|||||||
-- Run @gf --help@ for usage info.
|
-- Run @gf --help@ for usage info.
|
||||||
main :: IO ()
|
main :: IO ()
|
||||||
main = do
|
main = do
|
||||||
setConsoleEncoding
|
--setConsoleEncoding
|
||||||
uncurry mainOpts =<< getOptions
|
uncurry mainOpts =<< getOptions
|
||||||
|
|
||||||
-- | Get and parse GF command line arguments. Fix relative paths.
|
-- | Get and parse GF command line arguments. Fix relative paths.
|
||||||
|
|||||||
@@ -43,6 +43,7 @@ import GF.Infra.UseIO(readBinaryFile,writeBinaryFile,ePutStrLn)
|
|||||||
import GF.Infra.SIO(captureSIO)
|
import GF.Infra.SIO(captureSIO)
|
||||||
import GF.Data.Utilities(apSnd,mapSnd)
|
import GF.Data.Utilities(apSnd,mapSnd)
|
||||||
import qualified PGFService as PS
|
import qualified PGFService as PS
|
||||||
|
import qualified ExampleService as ES
|
||||||
import Data.Version(showVersion)
|
import Data.Version(showVersion)
|
||||||
import Paths_gf(getDataDir,version)
|
import Paths_gf(getDataDir,version)
|
||||||
import GF.Infra.BuildInfo (buildInfo)
|
import GF.Infra.BuildInfo (buildInfo)
|
||||||
@@ -170,6 +171,7 @@ handle logLn documentroot state0 cache execute1 stateVar
|
|||||||
(_ ,_ ,".pgf") -> do --debug $ "PGF service: "++path
|
(_ ,_ ,".pgf") -> do --debug $ "PGF service: "++path
|
||||||
wrapCGI $ PS.cgiMain' cache path
|
wrapCGI $ PS.cgiMain' cache path
|
||||||
(dir,"grammars.cgi",_ ) -> grammarList dir (decoded qs)
|
(dir,"grammars.cgi",_ ) -> grammarList dir (decoded qs)
|
||||||
|
(dir ,"exb.fcgi" ,_ ) -> wrapCGI $ ES.cgiMain' root dir (PS.pgfCache cache)
|
||||||
_ -> serveStaticFile rpath path
|
_ -> serveStaticFile rpath path
|
||||||
where path = translatePath rpath
|
where path = translatePath rpath
|
||||||
_ -> return $ resp400 upath
|
_ -> return $ resp400 upath
|
||||||
|
|||||||
@@ -57,7 +57,7 @@ pgfToCFG pgf lang = mkCFG (showCId (lookStartCat pgf)) extCats (startRules ++ co
|
|||||||
|
|
||||||
topdownRules cat = f cat []
|
topdownRules cat = f cat []
|
||||||
where
|
where
|
||||||
f cat rules = maybe rules (Set.fold g rules) (IntMap.lookup cat (productions cnc))
|
f cat rules = maybe rules (Set.foldr g rules) (IntMap.lookup cat (productions cnc))
|
||||||
|
|
||||||
g (PApply funid args) rules = (cncfuns cnc ! funid,args) : rules
|
g (PApply funid args) rules = (cncfuns cnc ! funid,args) : rules
|
||||||
g (PCoerce cat) rules = f cat rules
|
g (PCoerce cat) rules = f cat rules
|
||||||
|
|||||||
@@ -8,13 +8,13 @@ import System.Directory as D
|
|||||||
doesDirectoryExist,doesFileExist,getModificationTime,
|
doesDirectoryExist,doesFileExist,getModificationTime,
|
||||||
getCurrentDirectory,getDirectoryContents,getPermissions,
|
getCurrentDirectory,getDirectoryContents,getPermissions,
|
||||||
removeFile,renameFile)
|
removeFile,renameFile)
|
||||||
import Data.Time.Compat
|
--import Data.Time.Compat
|
||||||
|
|
||||||
canonicalizePath path = liftIO $ D.canonicalizePath path
|
canonicalizePath path = liftIO $ D.canonicalizePath path
|
||||||
createDirectoryIfMissing b = liftIO . D.createDirectoryIfMissing b
|
createDirectoryIfMissing b = liftIO . D.createDirectoryIfMissing b
|
||||||
doesDirectoryExist path = liftIO $ D.doesDirectoryExist path
|
doesDirectoryExist path = liftIO $ D.doesDirectoryExist path
|
||||||
doesFileExist path = liftIO $ D.doesFileExist path
|
doesFileExist path = liftIO $ D.doesFileExist path
|
||||||
getModificationTime path = liftIO $ fmap toUTCTime (D.getModificationTime path)
|
getModificationTime path = liftIO $ {-fmap toUTCTime-} (D.getModificationTime path)
|
||||||
getDirectoryContents path = liftIO $ D.getDirectoryContents path
|
getDirectoryContents path = liftIO $ D.getDirectoryContents path
|
||||||
|
|
||||||
getCurrentDirectory :: MonadIO io => io FilePath
|
getCurrentDirectory :: MonadIO io => io FilePath
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ instance Pretty a => Pretty [a] where
|
|||||||
ppList = fsep . map pp -- hmm
|
ppList = fsep . map pp -- hmm
|
||||||
|
|
||||||
render x = PP.render (pp x)
|
render x = PP.render (pp x)
|
||||||
|
render80 x = renderStyle style{lineLength=80,ribbonsPerLine=1} x
|
||||||
renderStyle s x = PP.renderStyle s (pp x)
|
renderStyle s x = PP.renderStyle s (pp x)
|
||||||
|
|
||||||
infixl 5 $$,$+$
|
infixl 5 $$,$+$
|
||||||
|
|||||||
553
src/example-based/ExampleDemo.hs
Normal file
553
src/example-based/ExampleDemo.hs
Normal file
@@ -0,0 +1,553 @@
|
|||||||
|
module ExampleDemo (Environ,initial,getNext, provideExample, testThis,mkFuncWithArg,searchGoodTree,isMeta)
|
||||||
|
where
|
||||||
|
|
||||||
|
import PGF
|
||||||
|
--import System.IO
|
||||||
|
import Data.List
|
||||||
|
--import Control.Monad
|
||||||
|
import qualified Data.Map as Map
|
||||||
|
--import qualified Data.IntMap as IntMap
|
||||||
|
import qualified Data.Set as Set
|
||||||
|
import Data.Maybe
|
||||||
|
--import System.Environment (getArgs)
|
||||||
|
import System.Random (RandomGen) --newStdGen
|
||||||
|
|
||||||
|
|
||||||
|
type MyType = CId -- name of the categories from the program
|
||||||
|
type ConcType = CId -- categories from the resource grammar, that we parse on
|
||||||
|
type MyFunc = CId -- functions that we need to implement
|
||||||
|
--type FuncWithArg = ((MyFunc, MyType), Expr) -- function with arguments
|
||||||
|
type InterInstr = [String] -- lincats that were generated but not written to the file
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
data FuncWithArg = FuncWithArg
|
||||||
|
{getName :: MyFunc, -- name of the function to generate
|
||||||
|
getType :: MyType, -- return type of the function
|
||||||
|
getTypeArgs :: [MyType] -- types of arguments
|
||||||
|
}
|
||||||
|
deriving (Show,Eq,Ord)
|
||||||
|
|
||||||
|
-- we assume that it's for English for the moment
|
||||||
|
|
||||||
|
|
||||||
|
type TypeMap = Map.Map MyType ConcType -- mapping found from a file
|
||||||
|
|
||||||
|
type ConcMap = Map.Map MyFunc Expr -- concrete expression after parsing
|
||||||
|
|
||||||
|
data Environ = Env {getTypeMap :: TypeMap, -- mapping between a category in the grammar and a concrete type from RGL
|
||||||
|
getConcMap :: ConcMap, -- concrete expression after parsing
|
||||||
|
getSigs :: Map.Map MyType [FuncWithArg], -- functions for which we have the concrete syntax already with args
|
||||||
|
getAll :: [FuncWithArg] -- all the functions with arguments
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
getNext :: Environ -> Environ -> ([MyFunc],[MyFunc])
|
||||||
|
getNext env example_env =
|
||||||
|
let sgs = getSigs env
|
||||||
|
allfuncs = getAll env
|
||||||
|
names = Set.fromList $ map getName $ concat $ Map.elems sgs
|
||||||
|
exampleable = filter (\x -> (isJust $ getNameExpr x env)
|
||||||
|
&&
|
||||||
|
(not $ Set.member x names) -- maybe drop this if you want to also rewrite from examples...
|
||||||
|
) $ map getName allfuncs
|
||||||
|
testeable = filter (\x -> (isJust $ getNameExpr x env )
|
||||||
|
&&
|
||||||
|
(Set.member x names)
|
||||||
|
) $ map getName allfuncs
|
||||||
|
|
||||||
|
in (exampleable,testeable)
|
||||||
|
|
||||||
|
|
||||||
|
provideExample :: RandomGen gen => gen -> Environ -> MyFunc -> PGF -> PGF -> Language -> Maybe (Expr,String)
|
||||||
|
provideExample gen env myfunc parsePGF pgfFile lang =
|
||||||
|
fmap giveExample $ getNameExpr myfunc env
|
||||||
|
where
|
||||||
|
giveExample e_ =
|
||||||
|
let newexpr = head $ generateRandomFromDepth gen pgfFile e_ (Just 5) -- change here with the new random generator
|
||||||
|
ty = getType $ head $ filter (\x -> getName x == myfunc) $ getAll env
|
||||||
|
embeddedExpr = maybe "" (\x -> ", as in: " ++ q (linearize pgfFile lang x)) (embedInStart (getAll env) (Map.fromList [(ty,e_)]))
|
||||||
|
lexpr = linearize pgfFile lang newexpr
|
||||||
|
q s = sq++s++sq
|
||||||
|
sq = "\""
|
||||||
|
in (newexpr,q lexpr ++ embeddedExpr)
|
||||||
|
-- question, you need the IO monad for the random generator, how to do otherwise ??
|
||||||
|
-- question can you make the expression bold/italic - somehow distinguishable from the rest ?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
testThis :: Environ -> MyFunc -> PGF -> Language -> Maybe String
|
||||||
|
testThis env myfunc parsePGF lang =
|
||||||
|
fmap (linearize parsePGF lang . mapToResource env . llin env) $
|
||||||
|
getNameExpr myfunc env
|
||||||
|
|
||||||
|
|
||||||
|
-- we assume that even the functions linearized by the user will still be in getSigs along with their linearization
|
||||||
|
|
||||||
|
|
||||||
|
-- fill in the blancs of an expression that we want to linearize for testing purposes
|
||||||
|
---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
llin :: Environ -> Expr -> Expr
|
||||||
|
llin env expr =
|
||||||
|
let
|
||||||
|
(id,args) = fromJust $ unApp expr
|
||||||
|
--cexpr = fromJust $ Map.lookup id (getConcMap env)
|
||||||
|
in
|
||||||
|
if any isMeta args
|
||||||
|
then let
|
||||||
|
sigs = concat $ Map.elems $ getSigs env
|
||||||
|
tys = findExprWhich sigs id
|
||||||
|
in replaceConcArg 1 tys expr env
|
||||||
|
else mkApp id $ map (llin env) args
|
||||||
|
|
||||||
|
|
||||||
|
-- argument of the meta variable to replace, list of arguments left, expression to replace, environment, current replace expression
|
||||||
|
replaceConcArg :: Int -> [MyType] -> Expr -> Environ -> Expr
|
||||||
|
replaceConcArg i [] expr env = expr
|
||||||
|
replaceConcArg i (t:ts) expr env = -- TO DO : insert randomness here !!
|
||||||
|
let ss = fromJust $ Map.lookup t $ getSigs env
|
||||||
|
args = filter (null . getTypeArgs) ss
|
||||||
|
finArg = if null args then let l = last ss in llin env (mkApp (getName l) [mkMeta j | j <- [1..(length $ getTypeArgs l)]])
|
||||||
|
else mkApp (getName $ last args) []
|
||||||
|
in
|
||||||
|
let newe = replaceOne i finArg expr
|
||||||
|
in replaceConcArg (i+1) ts newe env
|
||||||
|
|
||||||
|
-- replace a certain metavariable with a certain expression in another expression - return updated expression
|
||||||
|
replaceOne :: Int -> Expr -> Expr -> Expr
|
||||||
|
replaceOne i erep expr =
|
||||||
|
if isMeta expr && ((fromJust $ unMeta expr) == i)
|
||||||
|
then erep
|
||||||
|
else if isMeta expr then expr
|
||||||
|
else let (id,args) = fromJust $ unApp expr
|
||||||
|
in
|
||||||
|
mkApp id $ map (replaceOne i erep) args
|
||||||
|
|
||||||
|
|
||||||
|
findExprWhich :: [FuncWithArg] -> MyFunc -> [MyType]
|
||||||
|
findExprWhich lst f = getTypeArgs $ head $ filter (\x -> getName x == f) lst
|
||||||
|
|
||||||
|
|
||||||
|
mapToResource :: Environ -> Expr -> Expr
|
||||||
|
mapToResource env expr =
|
||||||
|
let (id,args) = maybe (error $ "tried to unwrap " ++ showExpr [] expr) (\x -> x) (unApp expr)
|
||||||
|
cmap = getConcMap env
|
||||||
|
cexp = maybe (error $ "didn't find " ++ showCId id ++ " in "++ show cmap) (\x -> x) (Map.lookup id cmap)
|
||||||
|
in
|
||||||
|
if null args then cexp
|
||||||
|
else let newargs = map (mapToResource env) args
|
||||||
|
in replaceAllArgs cexp 1 newargs
|
||||||
|
where
|
||||||
|
replaceAllArgs expr i [] = expr
|
||||||
|
replaceAllArgs expr i (x:xs) = replaceAllArgs (replaceOne i x expr) (i+1) xs
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-----------------------------------------------
|
||||||
|
|
||||||
|
-- embed expression in another one from the start category
|
||||||
|
|
||||||
|
embedInStart :: [FuncWithArg] -> Map.Map MyType Expr -> Maybe Expr
|
||||||
|
embedInStart fss cs =
|
||||||
|
let currset = Map.toList cs
|
||||||
|
nextset = Map.fromList $ concat [ if elem myt (getTypeArgs farg)
|
||||||
|
then connectWithArg (myt,exp) farg else []
|
||||||
|
| (myt,exp) <- currset, farg <- fss]
|
||||||
|
nextmap = Map.union cs nextset
|
||||||
|
maybeExpr = Map.lookup startCateg nextset
|
||||||
|
in if isNothing maybeExpr then
|
||||||
|
if Map.size nextmap == Map.size cs then Nothing --error $ "could't build " ++ show startCateg ++ "with " ++ show fss
|
||||||
|
else embedInStart fss nextmap
|
||||||
|
else return $ fromJust maybeExpr
|
||||||
|
where
|
||||||
|
connectWithArg (myt,exp) farg =
|
||||||
|
let ind = head $ elemIndices myt (getTypeArgs farg)
|
||||||
|
in [(getType farg, mkApp (getName farg) $ [mkMeta i | i <- [1..ind]] ++ [exp] ++ [mkMeta i | i <- [(ind + 1)..((length $ getTypeArgs farg) - 1)]])]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-----------------------------------------------
|
||||||
|
{-
|
||||||
|
updateConcMap :: Environ -> MyFunc -> Expr -> Environ
|
||||||
|
updateConcMap env myf expr =
|
||||||
|
Env (getTypeMap env) (Map.insert myf expr (getConcMap env)) (getSigs env) (getAll env)
|
||||||
|
|
||||||
|
|
||||||
|
updateInterInstr :: Environ -> MyType -> FuncWithArg -> Environ
|
||||||
|
updateInterInstr env myt myf =
|
||||||
|
let ii = getSigs env
|
||||||
|
newInterInstr =
|
||||||
|
maybe (Map.insert myt [myf] ii) (\x -> Map.insert myt (myf:x) ii) $ Map.lookup myt ii
|
||||||
|
in Env (getTypeMap env) (getConcMap env) newInterInstr (getAll env)
|
||||||
|
|
||||||
|
|
||||||
|
putSignatures :: Environ -> [FuncWithArg] -> Environ
|
||||||
|
putSignatures env fss =
|
||||||
|
Env (getTypeMap env) (getConcMap env) (mkSigs fss) (getAll env)
|
||||||
|
|
||||||
|
|
||||||
|
updateEnv :: Environ -> FuncWithArg -> MyType -> Expr -> Environ
|
||||||
|
updateEnv env myf myt expr =
|
||||||
|
let ii = getSigs env
|
||||||
|
nn = getName myf
|
||||||
|
newInterInstr =
|
||||||
|
maybe (Map.insert myt [myf] ii) (\x -> Map.insert myt (myf:x) ii) $ Map.lookup myt ii
|
||||||
|
in Env (getTypeMap env) (Map.insert nn expr (getConcMap env)) newInterInstr (getAll env)
|
||||||
|
-}
|
||||||
|
|
||||||
|
mkSigs :: [FuncWithArg] -> Map.Map MyType [FuncWithArg]
|
||||||
|
mkSigs fss = Map.fromListWith (++) $ zip (map getType fss) (map (\x -> [x]) fss)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
{------------------------------------
|
||||||
|
lang :: String
|
||||||
|
lang = "Eng"
|
||||||
|
|
||||||
|
|
||||||
|
parseLang :: Language
|
||||||
|
parseLang = fromJust $ readLanguage "ParseEng"
|
||||||
|
|
||||||
|
|
||||||
|
parsePGFfile :: String
|
||||||
|
parsePGFfile = "ParseEngAbs.pgf"
|
||||||
|
------------------------------------}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
searchGoodTree :: Environ -> Expr -> [Expr] -> IO (Maybe (Expr,Expr))
|
||||||
|
searchGoodTree env expr [] = return Nothing
|
||||||
|
searchGoodTree env expr (e:es) =
|
||||||
|
do val <- debugReplaceArgs expr e env
|
||||||
|
maybe (searchGoodTree env expr es) (\x -> return $ Just (x,e)) val
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
getNameExpr :: MyFunc -> Environ -> Maybe Expr
|
||||||
|
getNameExpr myfunc env =
|
||||||
|
let allfunc = filter (\x -> getName x == myfunc) $ getAll env
|
||||||
|
in
|
||||||
|
if null allfunc then Nothing
|
||||||
|
else getExpr (head allfunc) env
|
||||||
|
|
||||||
|
-- find an expression to generate where we have all the other elements available
|
||||||
|
getExpr :: FuncWithArg -> Environ -> Maybe Expr
|
||||||
|
getExpr farg env =
|
||||||
|
let tys = getTypeArgs farg
|
||||||
|
ctx = getSigs env
|
||||||
|
lst = getConcTypes ctx tys 1
|
||||||
|
in if (all isJust lst) then Just $ mkApp (getName farg) (map fromJust lst)
|
||||||
|
else Nothing
|
||||||
|
where getConcTypes context [] i = []
|
||||||
|
getConcTypes context (ty:types) i =
|
||||||
|
let pos = Map.lookup ty context
|
||||||
|
in
|
||||||
|
if isNothing pos || (null $ fromJust pos) then [Nothing]
|
||||||
|
else
|
||||||
|
let mm = last $ fromJust pos
|
||||||
|
mmargs = getTypeArgs mm
|
||||||
|
newi = i + length mmargs - 1
|
||||||
|
lst = getConcTypes (Map.insert ty (init $ (fromJust pos)) context) types (newi+1)
|
||||||
|
in
|
||||||
|
if (all isJust lst) then -- i..newi
|
||||||
|
(Just $ mkApp (getName mm) [mkMeta j | j <- [1..(length mmargs)]]) : lst
|
||||||
|
else [Nothing]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-- only covers simple expressions with meta variables, not the rest...
|
||||||
|
isGeneralizationOf :: Expr -> Expr -> Bool
|
||||||
|
isGeneralizationOf genExpr testExpr =
|
||||||
|
if isMeta genExpr then True
|
||||||
|
else if isMeta testExpr then False
|
||||||
|
else let genUnwrap = unApp genExpr
|
||||||
|
testUnwrap = unApp testExpr
|
||||||
|
in if isNothing genUnwrap || isNothing testUnwrap then False -- see if you can generalize here
|
||||||
|
else let (gencid, genargs) = fromJust genUnwrap
|
||||||
|
(testcid, testargs) = fromJust testUnwrap
|
||||||
|
in
|
||||||
|
(gencid == testcid) && (length genargs == length testargs)
|
||||||
|
&& (and [isGeneralizationOf g t | (g,t) <- (zip genargs testargs)])
|
||||||
|
|
||||||
|
{-do lst <- getConcTypes context types (i+1)
|
||||||
|
return $ mkMeta i : lst -}
|
||||||
|
|
||||||
|
debugReplaceArgs :: Expr -> Expr -> Environ -> IO (Maybe Expr)
|
||||||
|
debugReplaceArgs aexpr cexpr env =
|
||||||
|
if isNothing $ unApp aexpr then return Nothing
|
||||||
|
else if any isNothing $ map unApp $ snd $ fromJust $ unApp aexpr then return Nothing
|
||||||
|
else
|
||||||
|
let args = map (fst.fromJust.unApp) $ snd $ fromJust $ unApp aexpr
|
||||||
|
concExprs = map (\x -> fromJust $ Map.lookup x $ getConcMap env) args
|
||||||
|
in startReplace 1 cexpr concExprs
|
||||||
|
where
|
||||||
|
startReplace i cex [] = return $ Just cex
|
||||||
|
startReplace i cex (a:as) = do val <- debugReplaceConc cex i a
|
||||||
|
maybe ( --do putStrLn $ "didn't find "++ showExpr [] a ++ " in " ++showExpr [] cexpr
|
||||||
|
return Nothing)
|
||||||
|
(\x -> --do putStrLn $ "found it, the current expression is "++ showExpr [] x
|
||||||
|
startReplace (i+1) x as)
|
||||||
|
val
|
||||||
|
|
||||||
|
debugReplaceConc :: Expr -> Int -> Expr -> IO (Maybe Expr)
|
||||||
|
debugReplaceConc expr i e =
|
||||||
|
let (newe,isThere) = searchArg expr
|
||||||
|
in if isThere then return $ Just newe else return $ Nothing
|
||||||
|
where
|
||||||
|
searchArg e_ =
|
||||||
|
if isGeneralizationOf e e_ then (mkMeta i, True)
|
||||||
|
else maybe (e_,False) (\(cid,args) -> let repargs = map searchArg args
|
||||||
|
in (mkApp cid (map fst repargs), or $ map snd repargs)) $ unApp e_
|
||||||
|
|
||||||
|
|
||||||
|
{-
|
||||||
|
-- replaceArgs : Original expression to parse (from abstract syntax) -> Concrete expression (parsed)
|
||||||
|
replaceArgs :: Expr -> Expr -> Environ -> Maybe Expr
|
||||||
|
replaceArgs aexpr cexpr env =
|
||||||
|
if isNothing $ unApp aexpr then error $ "could't unwrap this "++ show aexpr
|
||||||
|
else if any isNothing $ map unApp $ snd $ fromJust $ unApp aexpr then error $ "couldn't unwrap more this : "++ show aexpr
|
||||||
|
else
|
||||||
|
let args = map (fst.fromJust.unApp) $ snd $ fromJust $ unApp aexpr
|
||||||
|
concExprs = map (\x -> fromJust $ Map.lookup x $ getConcMap env) args
|
||||||
|
in startReplace 1 cexpr concExprs
|
||||||
|
where
|
||||||
|
startReplace i cex [] = return cex
|
||||||
|
startReplace i cex (a:as) = maybe Nothing (\x -> startReplace (i+1) x as) $ replaceConc cex i a
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
replaceConc :: Expr -> Int -> Expr -> Maybe Expr
|
||||||
|
replaceConc expr i e =
|
||||||
|
let (newe,isThere) = searchArg expr
|
||||||
|
in if isThere then return newe else Nothing
|
||||||
|
where
|
||||||
|
searchArg e_ =
|
||||||
|
if isGeneralizationOf e e_ then (mkMeta i, True)
|
||||||
|
else maybe (e_,False) (\(cid,args) -> let repargs = map searchArg args
|
||||||
|
in (mkApp cid (map fst repargs), or $ map snd repargs)) $ unApp e_
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
writeResults :: Environ -> String -> IO ()
|
||||||
|
writeResults env fileName =
|
||||||
|
let cmap = getConcMap env
|
||||||
|
lincats = unlines $ map (\(x,y) -> "lincat " ++ showCId x ++ " = " ++ showCId y ++ " ; " ) $ Map.toList $ getTypeMap env
|
||||||
|
sigs = unlines $ map
|
||||||
|
(\x -> let n = getName x
|
||||||
|
no = length $ getTypeArgs x
|
||||||
|
oargs = unwords $ ("lin " ++ showCId n) : ["o"++show i | i <- [1..no]]
|
||||||
|
in (oargs ++ " = " ++ (simpleReplace $ showExpr [] $ fromJust $ Map.lookup n cmap) ++ " ; ")) $ concat $ Map.elems $ getSigs env
|
||||||
|
in
|
||||||
|
writeFile fileName ("\n" ++ lincats ++ "\n\n" ++ sigs)
|
||||||
|
|
||||||
|
|
||||||
|
simpleReplace :: String -> String
|
||||||
|
simpleReplace [] = []
|
||||||
|
simpleReplace ('?':xs) = 'o' : simpleReplace xs
|
||||||
|
simpleReplace (x:xs) = x : simpleReplace xs
|
||||||
|
-}
|
||||||
|
|
||||||
|
isMeta :: Expr -> Bool
|
||||||
|
isMeta = isJust.unMeta
|
||||||
|
|
||||||
|
-- works with utf-8 characters also, as it seems
|
||||||
|
|
||||||
|
|
||||||
|
mkFuncWithArg :: ((CId,CId),[CId]) -> FuncWithArg
|
||||||
|
mkFuncWithArg ((c1,c2),cids) = FuncWithArg c1 c2 cids
|
||||||
|
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
initial :: TypeMap -> ConcMap -> [FuncWithArg] -> [FuncWithArg] -> Environ
|
||||||
|
initial tm cm fss allfs = Env tm cm (mkSigs fss) allfs
|
||||||
|
{-
|
||||||
|
testInit :: [FuncWithArg] -> Environ
|
||||||
|
testInit allfs = initial lTypes Map.empty [] allfs
|
||||||
|
|
||||||
|
lTypes = Map.fromList [(mkCId "Comment", mkCId "S"),(mkCId "Item", mkCId "NP"), (mkCId "Kind", mkCId "CN"), (mkCId "Quality", mkCId "AP")]
|
||||||
|
-}
|
||||||
|
startCateg = mkCId "Comment"
|
||||||
|
-- question about either to give the startcat or not ...
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
----------------------------------------------------------------------------------------------------------
|
||||||
|
{-
|
||||||
|
main =
|
||||||
|
do args <- getArgs
|
||||||
|
case args of
|
||||||
|
[pgfFile] ->
|
||||||
|
do pgf <- readPGF pgfFile
|
||||||
|
parsePGF <- readPGF parsePGFfile
|
||||||
|
fsWithArg <- forExample pgf
|
||||||
|
let funcsWithArg = map (map mkFuncWithArg) fsWithArg
|
||||||
|
let morpho = buildMorpho parsePGF parseLang
|
||||||
|
let fss = concat funcsWithArg
|
||||||
|
let fileName = takeWhile (/='.') pgfFile ++ lang ++ ".gf"
|
||||||
|
env <- start parsePGF pgf morpho (testInit fss) fss
|
||||||
|
putStrLn $ "Should I write the results to a file ? yes/no"
|
||||||
|
ans <-getLine
|
||||||
|
if ans == "yes" then do writeResults env fileName
|
||||||
|
putStrLn $ "Wrote file " ++ fileName
|
||||||
|
else return ()
|
||||||
|
_ -> fail "usage : Testing <path-to-pgf> "
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
start :: PGF -> PGF -> Morpho -> Environ -> [FuncWithArg] -> IO Environ
|
||||||
|
start parsePGF pgfFile morpho env lst =
|
||||||
|
do putStrLn "Do you want examples from another language ? (no/concrete syntax name otherwise)"
|
||||||
|
ans1 <- getLine
|
||||||
|
putStrLn "Do you want testing mode ? (yes/no)"
|
||||||
|
ans2 <- getLine
|
||||||
|
case (ans1,ans2) of
|
||||||
|
("no","no") -> do putStrLn "no extra language, just the abstract syntax tree"
|
||||||
|
interact env lst False Nothing
|
||||||
|
(_,"no") -> interact env lst False (readLanguage ans1)
|
||||||
|
("no","yes") -> do putStrLn "no extra language, just the abstract syntax tree"
|
||||||
|
interact env lst True Nothing
|
||||||
|
(_,"yes") -> interact env lst True (readLanguage ans1)
|
||||||
|
("no",_) -> do putStrLn "no extra language, just the abstract syntax tree"
|
||||||
|
putStrLn $ "I assume you don't want the testing mode ... "
|
||||||
|
interact env lst False Nothing
|
||||||
|
(_,_) -> do putStrLn $ "I assume you don't want the testing mode ... "
|
||||||
|
interact env lst False (readLanguage ans1)
|
||||||
|
where
|
||||||
|
|
||||||
|
interact environ [] func _ = return environ
|
||||||
|
interact environ (farg:fargs) boo otherLang =
|
||||||
|
do
|
||||||
|
maybeEnv <- basicInter farg otherLang environ boo
|
||||||
|
if isNothing maybeEnv then return environ
|
||||||
|
else interact (fromJust maybeEnv) fargs boo otherLang
|
||||||
|
|
||||||
|
basicInter farg js environ False =
|
||||||
|
let e_ = getExpr farg environ in
|
||||||
|
if isNothing e_ then return $ Just environ
|
||||||
|
else parseAndBuild farg js environ (getType farg) e_ Nothing
|
||||||
|
basicInter farg js environ True =
|
||||||
|
let (e_,e_test) = get2Expr farg environ in
|
||||||
|
if isNothing e_ then return $ Just environ
|
||||||
|
else if isNothing e_test then do putStrLn $ "not enough arguments "++ (showCId $ getName farg)
|
||||||
|
parseAndBuild farg js environ (getType farg) e_ Nothing
|
||||||
|
else parseAndBuild farg js environ (getType farg) e_ e_test
|
||||||
|
|
||||||
|
-- . head . generateRandomFrom gen2 pgfFile
|
||||||
|
parseAndBuild farg js environ ty e_ e_test =
|
||||||
|
do let expr = fromJust e_
|
||||||
|
gen1 <- newStdGen
|
||||||
|
gen2 <- newStdGen
|
||||||
|
let newexpr = head $ generateRandomFrom gen1 pgfFile expr
|
||||||
|
let embeddedExpr = maybe "***" (showExpr [] ) (embedInStart (getAll environ) (Map.fromList [(ty,expr)]))
|
||||||
|
let lexpr = if isNothing js then "" else "\n-- " ++ linearize pgfFile (fromJust js) newexpr ++ " --"
|
||||||
|
putStrLn $ "Give an example for " ++ (showExpr [] expr)
|
||||||
|
++ lexpr ++ "and now"
|
||||||
|
++ "\n\nas in " ++ embeddedExpr ++ "\n\n"
|
||||||
|
--
|
||||||
|
ex <- getLine
|
||||||
|
if (ex == ":q") then return Nothing
|
||||||
|
else
|
||||||
|
let ctype = fromJust $ Map.lookup (getType farg) (getTypeMap environ) in
|
||||||
|
do env' <- decypher farg ex expr environ (fromJust $ readType $ showCId ctype) e_test
|
||||||
|
return (Just env')
|
||||||
|
|
||||||
|
decypher farg ex expr environ ty e_test =
|
||||||
|
--do putStrLn $ "We need to parse " ++ ex ++ " as " ++ show ctype
|
||||||
|
let pTrees = parse parsePGF (fromJust $ readLanguage "ParseEng") ty ex in
|
||||||
|
pickTree farg expr environ ex e_test pTrees
|
||||||
|
|
||||||
|
-- putStrLn $ "And now for testing, \n is this also correct yes/no \n ## " ++ (linearize parsePGF parseLang $ mapToResource newenv $ llin newenv e_test) ++ " ##"
|
||||||
|
|
||||||
|
-- select the right tree among the options given by the parser
|
||||||
|
pickTree farg expr environ ex e_test [] =
|
||||||
|
let miswords = morphoMissing morpho (words ex)
|
||||||
|
in
|
||||||
|
if null miswords then do putStrLn $ "all words known, but some syntactic construction is not covered by the grammar..."
|
||||||
|
return environ
|
||||||
|
else do putStrLn $ "the following words are unknown, please add them to the lexicon: " ++ show miswords
|
||||||
|
return environ
|
||||||
|
pickTree farg expr environ ex e_test [tree] =
|
||||||
|
do val <- searchGoodTree environ expr [tree] -- maybe order here after the probabilities for better precision
|
||||||
|
maybe (do putStrLn $ "none of the trees is consistent with the rest of the grammar, please check arguments "
|
||||||
|
return environ)
|
||||||
|
(\(x,newtree) -> let newenv = updateEnv environ farg (getType farg) x in
|
||||||
|
do putStrLn $ "the result is "++showExpr [] x
|
||||||
|
newtestenv <- testTest newenv e_test -- question ? should it belong there - there is just one possibility of a tree...
|
||||||
|
return newenv) val
|
||||||
|
pickTree farg expr environ ex e_test parseTrees =
|
||||||
|
do putStrLn $ "There is more than one possibility, do you want to choose the right tree yourself ? yes/no "
|
||||||
|
putStr " >"
|
||||||
|
ans <- getLine
|
||||||
|
if ans == "yes" then do pTree <- chooseRightTree parseTrees
|
||||||
|
processTree farg environ expr pTree e_test
|
||||||
|
else processTree farg environ expr parseTrees e_test
|
||||||
|
|
||||||
|
-- introduce testing function, if it doesn't work, then reparse, take that tree
|
||||||
|
testTree envv e_test = return envv -- TO DO - add testing here
|
||||||
|
|
||||||
|
testTest envv Nothing = return envv
|
||||||
|
testTest envv (Just exxpr) = testTree envv exxpr
|
||||||
|
|
||||||
|
|
||||||
|
-- allows the user to pick his own tree
|
||||||
|
chooseRightTree trees = return trees -- TO DO - add something clever here
|
||||||
|
|
||||||
|
-- selects the tree from where one can abstract over the original arguments
|
||||||
|
processTree farg environ expr lsTrees e_test =
|
||||||
|
let trmes = if length lsTrees == 1 then "the tree is not consistent " else "none of the trees is consistent " in
|
||||||
|
do val <- searchGoodTree environ expr lsTrees
|
||||||
|
maybe (do putStrLn $ trmes ++ "with the rest of the grammar, please check arguments! "
|
||||||
|
return environ)
|
||||||
|
(\(x,newtree) -> let newenv = updateEnv environ farg (getType farg) x in
|
||||||
|
do putStrLn $ "the result is "++showExpr [] x
|
||||||
|
newtestenv <- testTest newenv e_test
|
||||||
|
return newenv) val
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
-------------------------------
|
||||||
|
|
||||||
|
get2Expr :: FuncWithArg -> Environ -> (Maybe Expr, Maybe Expr)
|
||||||
|
get2Expr farg env =
|
||||||
|
let tys = getTypeArgs farg
|
||||||
|
ctx = getSigs env
|
||||||
|
(lst1,lst2) = getConcTypes2 ctx tys 1
|
||||||
|
arg1 = if (all isJust lst1) then Just $ mkApp (getName farg) (map fromJust lst1) else Nothing
|
||||||
|
arg2 = if (all isJust lst2) then Just $ mkApp (getName farg) (map fromJust lst2) else Nothing
|
||||||
|
in if arg1 == arg2 then (arg1, Nothing)
|
||||||
|
else (arg1,arg2)
|
||||||
|
where
|
||||||
|
getConcTypes2 context [] i = ([],[])
|
||||||
|
getConcTypes2 context (ty:types) i =
|
||||||
|
let pos = Map.lookup ty context
|
||||||
|
in
|
||||||
|
if isNothing pos || (null $ fromJust pos) then ([Nothing],[Nothing])
|
||||||
|
else
|
||||||
|
let (mm,tt) = (last $ fromJust pos, head $ fromJust pos)
|
||||||
|
mmargs = getTypeArgs mm
|
||||||
|
newi = i + length mmargs - 1
|
||||||
|
(lst1,lst2) = getConcTypes2 (Map.insert ty (init (fromJust pos)) context) types (newi+1)
|
||||||
|
ttargs = getTypeArgs tt
|
||||||
|
newtti = i + length ttargs - 1
|
||||||
|
fstArg = if (all isJust lst1) then -- i..newi
|
||||||
|
(Just $ mkApp (getName mm) [mkMeta j | j <- [1..(length mmargs)]]) : lst1
|
||||||
|
else [Nothing]
|
||||||
|
sndArg = if (all isJust lst2) then
|
||||||
|
(Just $ mkApp (getName tt) [mkMeta j | j <- [1..(length ttargs)]]) : lst2
|
||||||
|
else [Nothing]
|
||||||
|
in
|
||||||
|
(fstArg,sndArg)
|
||||||
|
|
||||||
|
|
||||||
|
-}
|
||||||
|
|
||||||
128
src/example-based/ExampleService.hs
Normal file
128
src/example-based/ExampleService.hs
Normal file
@@ -0,0 +1,128 @@
|
|||||||
|
module ExampleService(cgiMain,cgiMain',newPGFCache) where
|
||||||
|
import System.Random(newStdGen)
|
||||||
|
import System.FilePath((</>),makeRelative)
|
||||||
|
import Data.Map(fromList)
|
||||||
|
import Data.Char(isDigit)
|
||||||
|
import Data.Maybe(fromJust)
|
||||||
|
import qualified Codec.Binary.UTF8.String as UTF8 (decodeString)
|
||||||
|
import PGF
|
||||||
|
import GF.Compile.ToAPI
|
||||||
|
import Network.CGI
|
||||||
|
import Text.JSON
|
||||||
|
import CGIUtils
|
||||||
|
import Cache
|
||||||
|
import qualified ExampleDemo as E
|
||||||
|
|
||||||
|
newPGFCache = newCache readPGF
|
||||||
|
|
||||||
|
|
||||||
|
cgiMain :: Cache PGF -> CGI CGIResult
|
||||||
|
cgiMain = handleErrors . handleCGIErrors . cgiMain' "." "."
|
||||||
|
|
||||||
|
cgiMain' root cwd cache =
|
||||||
|
do command <- getInp "command"
|
||||||
|
environ <- parseEnviron =<< getInp "state"
|
||||||
|
case command of
|
||||||
|
"possibilities" -> doPossibilities environ
|
||||||
|
"provide_example" -> doProvideExample root cwd cache environ
|
||||||
|
"abstract_example" -> doAbstractExample cwd cache environ
|
||||||
|
"test_function" -> doTestFunction cwd cache environ
|
||||||
|
_ -> throwCGIError 400 ("Unknown command: "++command) []
|
||||||
|
|
||||||
|
doPossibilities environ =
|
||||||
|
do example_environ <- parseEnviron =<< getInp "example_state"
|
||||||
|
outputJSONP (E.getNext environ example_environ)
|
||||||
|
|
||||||
|
doProvideExample root cwd cache environ =
|
||||||
|
do Just lang <- readInput "lang"
|
||||||
|
fun <- getCId "fun"
|
||||||
|
parsePGF <- readParsePGF cwd cache
|
||||||
|
let adjpath path = root</>makeRelative "/" (makeRelative root cwd</>path)
|
||||||
|
pgf <- liftIO . readCache cache . adjpath =<< getInp "grammar"
|
||||||
|
gen <- liftIO newStdGen
|
||||||
|
let Just (e,s) = E.provideExample gen environ fun parsePGF pgf lang
|
||||||
|
res = (showExpr [] e,s)
|
||||||
|
liftIO $ logError $ "proveExample ... = "++show res
|
||||||
|
outputJSONP res
|
||||||
|
|
||||||
|
doAbstractExample cwd cache environ =
|
||||||
|
do example <- getInp "input"
|
||||||
|
Just params <- readInput "params"
|
||||||
|
absstr <- getInp "abstract"
|
||||||
|
Just abs <- return $ readExpr absstr
|
||||||
|
liftIO $ logError $ "abstract = "++showExpr [] abs
|
||||||
|
Just cat <- readInput "cat"
|
||||||
|
let t = mkType [] cat []
|
||||||
|
parsePGF <- readParsePGF cwd cache
|
||||||
|
let lang:_ = languages parsePGF
|
||||||
|
ae <- liftIO $ abstractExample parsePGF environ lang t abs example
|
||||||
|
outputJSONP (fmap (\(e,_)->(exprToAPI (instExpMeta params e),e)) ae)
|
||||||
|
|
||||||
|
abstractExample parsePGF env lang cat abs example =
|
||||||
|
E.searchGoodTree env abs (parse parsePGF lang cat example)
|
||||||
|
|
||||||
|
doTestFunction cwd cache environ =
|
||||||
|
do fun <- getCId "fun"
|
||||||
|
parsePGF <- readParsePGF cwd cache
|
||||||
|
let lang:_ = languages parsePGF
|
||||||
|
Just txt <- return (E.testThis environ fun parsePGF lang)
|
||||||
|
outputJSONP txt
|
||||||
|
|
||||||
|
getCId :: String -> CGI CId
|
||||||
|
getCId name = maybe err return =<< fmap readCId (getInp name)
|
||||||
|
where err = throwCGIError 400 ("Bad "++name) []
|
||||||
|
{-
|
||||||
|
getLimit :: CGI Int
|
||||||
|
getLimit = maybe err return =<< readInput "limit"
|
||||||
|
where err = throwCGIError 400 "Missing/bad limit" []
|
||||||
|
-}
|
||||||
|
|
||||||
|
readParsePGF cwd cache =
|
||||||
|
do parsepgf <- getInp "parser"
|
||||||
|
liftIO $ readCache cache (cwd</>parsepgf)
|
||||||
|
|
||||||
|
parseEnviron s = do state <- liftIO $ readIO s
|
||||||
|
return $ environ state
|
||||||
|
|
||||||
|
getInp name = maybe err (return . UTF8.decodeString) =<< getInput name
|
||||||
|
where err = throwCGIError 400 ("Missing parameter: "++name) []
|
||||||
|
|
||||||
|
|
||||||
|
instance JSON CId where
|
||||||
|
showJSON = showJSON . show
|
||||||
|
readJSON = (readResult =<<) . readJSON
|
||||||
|
|
||||||
|
instance JSON Expr where
|
||||||
|
showJSON = showJSON . showExpr []
|
||||||
|
readJSON = (m2r . readExpr =<<) . readJSON
|
||||||
|
|
||||||
|
m2r = maybe (Error "read failed") Ok
|
||||||
|
|
||||||
|
readResult s = case reads s of
|
||||||
|
(x,r):_ | lex r==[("","")] -> Ok x
|
||||||
|
_ -> Error "read failed"
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- cat lincat fun lin fun cat cat
|
||||||
|
environ :: ([(CId, CId)],[(CId, Expr)],[((CId, CId), [CId])]) -> E.Environ
|
||||||
|
environ (lincats,lins0,funs) =
|
||||||
|
E.initial (fromList lincats) concmap fs allfs
|
||||||
|
where
|
||||||
|
concmap = fromList lins
|
||||||
|
allfs = map E.mkFuncWithArg funs
|
||||||
|
fs = [E.mkFuncWithArg f | f@((fn,_),_)<-funs, fn `elem` cns]
|
||||||
|
cns = map fst lins
|
||||||
|
lins = filter (not . E.isMeta .snd) lins0
|
||||||
|
|
||||||
|
|
||||||
|
instExpMeta :: [CId] -> Expr -> Expr
|
||||||
|
instExpMeta ps = fromJust . readExpr . instMeta ps . showExpr []
|
||||||
|
|
||||||
|
instMeta :: [CId] -> String -> String
|
||||||
|
instMeta ps s =
|
||||||
|
case break (=='?') s of
|
||||||
|
(s1,'?':s2) ->
|
||||||
|
case span isDigit s2 of
|
||||||
|
(s21@(_:_),s22) -> s1++show (ps!!(read s21-1))++instMeta ps s22
|
||||||
|
("",s22) -> s1++'?':instMeta ps s22
|
||||||
|
(_,_) -> s
|
||||||
15
src/example-based/exb-fcgi.hs
Normal file
15
src/example-based/exb-fcgi.hs
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
{-# LANGUAGE CPP #-}
|
||||||
|
import Control.Concurrent(forkIO)
|
||||||
|
import Network.FastCGI(runFastCGI,runFastCGIConcurrent')
|
||||||
|
import ExampleService(cgiMain,newPGFCache)
|
||||||
|
|
||||||
|
main = do --stderrToFile logFile
|
||||||
|
fcgiMain =<< newPGFCache
|
||||||
|
|
||||||
|
|
||||||
|
fcgiMain cache =
|
||||||
|
#ifndef mingw32_HOST_OS
|
||||||
|
runFastCGIConcurrent' forkIO 100 (cgiMain cache)
|
||||||
|
#else
|
||||||
|
runFastCGI (cgiMain cache)
|
||||||
|
#endif
|
||||||
25
src/example-based/gf-exb.cabal
Normal file
25
src/example-based/gf-exb.cabal
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
Name: gf-exb
|
||||||
|
Version: 1.0
|
||||||
|
Cabal-version: >= 1.8
|
||||||
|
Build-type: Simple
|
||||||
|
License: GPL
|
||||||
|
Synopsis: Example-based grammar writing for the Grammatical Framework
|
||||||
|
|
||||||
|
executable exb.fcgi
|
||||||
|
main-is: exb-fcgi.hs
|
||||||
|
Hs-source-dirs: . ../server ../compiler ../runtime/haskell
|
||||||
|
other-modules: ExampleService ExampleDemo
|
||||||
|
FastCGIUtils Cache GF.Compile.ToAPI
|
||||||
|
-- and a lot more...
|
||||||
|
ghc-options: -threaded
|
||||||
|
if impl(ghc>=7.0)
|
||||||
|
ghc-options: -rtsopts
|
||||||
|
|
||||||
|
build-depends: base >=4.2 && <5, json, cgi, fastcgi, random,
|
||||||
|
containers, old-time, directory, bytestring, utf8-string,
|
||||||
|
pretty, array, mtl, fst, filepath
|
||||||
|
|
||||||
|
if os(windows)
|
||||||
|
ghc-options: -optl-mwindows
|
||||||
|
else
|
||||||
|
build-depends: unix
|
||||||
20
src/example-based/todo.txt
Normal file
20
src/example-based/todo.txt
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
|
||||||
|
Editor improvements for example-based grammar writing:
|
||||||
|
+ Remove the same language from the example language menu
|
||||||
|
+ Send the other language environment to getNext
|
||||||
|
- Compile a new .pgf automatically when needed
|
||||||
|
- Update buttons automatically when functions are added or removed
|
||||||
|
- Switch over to using AbsParadigmsEng.pgf instead of the old exprToAPI function
|
||||||
|
|
||||||
|
Editor support for guided construction of linearization functions
|
||||||
|
- enter api expressions by parsing them with AbsParadigmsEng.pgf in minibar
|
||||||
|
- replace simpleParseInput with one that accepts quoted string literals
|
||||||
|
- use lexcode/unlexcode in minibar
|
||||||
|
- better support for literals in minibar (completion info from the PGF
|
||||||
|
library should indicate if literals are acceptable)
|
||||||
|
|
||||||
|
Server support for example-based grammar writing:
|
||||||
|
- Change getNext to use info from the example language
|
||||||
|
- Random generator restricted to defined functions
|
||||||
|
|
||||||
|
- More testing
|
||||||
@@ -68,6 +68,7 @@ libpgf_la_SOURCES = \
|
|||||||
pgf/data.h \
|
pgf/data.h \
|
||||||
pgf/expr.c \
|
pgf/expr.c \
|
||||||
pgf/expr.h \
|
pgf/expr.h \
|
||||||
|
pgf/scanner.c \
|
||||||
pgf/parser.c \
|
pgf/parser.c \
|
||||||
pgf/lookup.c \
|
pgf/lookup.c \
|
||||||
pgf/jit.c \
|
pgf/jit.c \
|
||||||
|
|||||||
@@ -74,6 +74,8 @@
|
|||||||
|
|
||||||
#ifdef GU_ALIGNOF
|
#ifdef GU_ALIGNOF
|
||||||
# define gu_alignof GU_ALIGNOF
|
# define gu_alignof GU_ALIGNOF
|
||||||
|
#elif defined(_MSC_VER)
|
||||||
|
# define gu_alignof __alignof
|
||||||
#else
|
#else
|
||||||
# define gu_alignof(t_) \
|
# define gu_alignof(t_) \
|
||||||
((size_t)(offsetof(struct { char c_; t_ e_; }, e_)))
|
((size_t)(offsetof(struct { char c_; t_ e_; }, e_)))
|
||||||
@@ -87,7 +89,7 @@
|
|||||||
|
|
||||||
#define GU_COMMA ,
|
#define GU_COMMA ,
|
||||||
|
|
||||||
#define GU_ARRAY_LEN(t,a) (sizeof((const t[])a) / sizeof(t))
|
#define GU_ARRAY_LEN(a) (sizeof(a) / sizeof(a[0]))
|
||||||
|
|
||||||
#define GU_ID(...) __VA_ARGS__
|
#define GU_ID(...) __VA_ARGS__
|
||||||
|
|
||||||
@@ -193,9 +195,13 @@ typedef union {
|
|||||||
void (*fp)();
|
void (*fp)();
|
||||||
} GuMaxAlign;
|
} GuMaxAlign;
|
||||||
|
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
#include <malloc.h>
|
||||||
|
#define gu_alloca(N) alloca(N)
|
||||||
|
#else
|
||||||
#define gu_alloca(N) \
|
#define gu_alloca(N) \
|
||||||
(((union { GuMaxAlign align_; uint8_t buf_[N]; }){{0}}).buf_)
|
(((union { GuMaxAlign align_; uint8_t buf_[N]; }){{0}}).buf_)
|
||||||
|
#endif
|
||||||
|
|
||||||
// For Doxygen
|
// For Doxygen
|
||||||
#define GU_PRIVATE /** @private */
|
#define GU_PRIVATE /** @private */
|
||||||
|
|||||||
@@ -8,6 +8,10 @@
|
|||||||
#include <sys/mman.h>
|
#include <sys/mman.h>
|
||||||
#include <sys/stat.h>
|
#include <sys/stat.h>
|
||||||
#endif
|
#endif
|
||||||
|
#if defined(__MINGW32__) || defined(_MSC_VER)
|
||||||
|
#include <malloc.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
#if !defined(_MSC_VER)
|
#if !defined(_MSC_VER)
|
||||||
#include <unistd.h>
|
#include <unistd.h>
|
||||||
#endif
|
#endif
|
||||||
@@ -108,6 +112,39 @@ gu_mem_buf_alloc(size_t min_size, size_t* real_size_out)
|
|||||||
return gu_mem_buf_realloc(NULL, min_size, real_size_out);
|
return gu_mem_buf_realloc(NULL, min_size, real_size_out);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#if defined(__MINGW32__) || defined(_MSC_VER)
|
||||||
|
#include <windows.h>
|
||||||
|
|
||||||
|
static int
|
||||||
|
getpagesize()
|
||||||
|
{
|
||||||
|
SYSTEM_INFO system_info;
|
||||||
|
GetSystemInfo(&system_info);
|
||||||
|
return system_info.dwPageSize;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
GU_API void*
|
||||||
|
gu_mem_page_alloc(size_t min_size, size_t* real_size_out)
|
||||||
|
{
|
||||||
|
size_t page_size = getpagesize();
|
||||||
|
size_t size = ((min_size + page_size - 1) / page_size) * page_size;
|
||||||
|
void *page = NULL;
|
||||||
|
|
||||||
|
#if defined(ANDROID)
|
||||||
|
if ((page = memalign(page_size, size)) == NULL) {
|
||||||
|
#elif defined(__MINGW32__) || defined(_MSC_VER)
|
||||||
|
if ((page = malloc(size)) == NULL) {
|
||||||
|
#else
|
||||||
|
if (posix_memalign(&page, page_size, size) != 0) {
|
||||||
|
#endif
|
||||||
|
gu_fatal("Memory allocation failed");
|
||||||
|
}
|
||||||
|
|
||||||
|
*real_size_out = size;
|
||||||
|
return page;
|
||||||
|
}
|
||||||
|
|
||||||
GU_API void
|
GU_API void
|
||||||
gu_mem_buf_free(void* buf)
|
gu_mem_buf_free(void* buf)
|
||||||
{
|
{
|
||||||
@@ -132,6 +169,7 @@ struct GuFinalizerNode {
|
|||||||
enum GuPoolType {
|
enum GuPoolType {
|
||||||
GU_POOL_HEAP,
|
GU_POOL_HEAP,
|
||||||
GU_POOL_LOCAL,
|
GU_POOL_LOCAL,
|
||||||
|
GU_POOL_PAGE,
|
||||||
GU_POOL_MMAP
|
GU_POOL_MMAP
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -180,6 +218,16 @@ gu_new_pool(void)
|
|||||||
return pool;
|
return pool;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
GU_API GuPool*
|
||||||
|
gu_new_page_pool(void)
|
||||||
|
{
|
||||||
|
size_t sz = GU_FLEX_SIZE(GuPool, init_buf, gu_mem_pool_initial_size);
|
||||||
|
uint8_t* buf = gu_mem_page_alloc(sz, &sz);
|
||||||
|
GuPool* pool = gu_init_pool(buf, sz);
|
||||||
|
pool->type = GU_POOL_PAGE;
|
||||||
|
return pool;
|
||||||
|
}
|
||||||
|
|
||||||
GU_API GuPool*
|
GU_API GuPool*
|
||||||
gu_mmap_pool(char* fpath, void* addr, size_t size, void**pptr)
|
gu_mmap_pool(char* fpath, void* addr, size_t size, void**pptr)
|
||||||
{
|
{
|
||||||
@@ -238,7 +286,10 @@ gu_pool_expand(GuPool* pool, size_t req)
|
|||||||
gu_mem_chunk_max_size));
|
gu_mem_chunk_max_size));
|
||||||
gu_assert(real_req >= sizeof(GuMemChunk));
|
gu_assert(real_req >= sizeof(GuMemChunk));
|
||||||
size_t size = 0;
|
size_t size = 0;
|
||||||
GuMemChunk* chunk = gu_mem_buf_alloc(real_req, &size);
|
GuMemChunk* chunk =
|
||||||
|
(pool->type == GU_POOL_PAGE)
|
||||||
|
? gu_mem_page_alloc(real_req, &size)
|
||||||
|
: gu_mem_buf_alloc(real_req, &size);
|
||||||
chunk->next = pool->chunks;
|
chunk->next = pool->chunks;
|
||||||
pool->chunks = chunk;
|
pool->chunks = chunk;
|
||||||
pool->curr_buf = (uint8_t*) chunk;
|
pool->curr_buf = (uint8_t*) chunk;
|
||||||
@@ -309,6 +360,7 @@ gu_malloc_prefixed(GuPool* pool, size_t pre_align, size_t pre_size,
|
|||||||
size_t full_size = gu_mem_advance(offsetof(GuMemChunk, data),
|
size_t full_size = gu_mem_advance(offsetof(GuMemChunk, data),
|
||||||
pre_align, pre_size, align, size);
|
pre_align, pre_size, align, size);
|
||||||
if (full_size > gu_mem_max_shared_alloc &&
|
if (full_size > gu_mem_max_shared_alloc &&
|
||||||
|
pool->type != GU_POOL_PAGE &&
|
||||||
pool->type != GU_POOL_MMAP) {
|
pool->type != GU_POOL_MMAP) {
|
||||||
GuMemChunk* chunk = gu_mem_alloc(full_size);
|
GuMemChunk* chunk = gu_mem_alloc(full_size);
|
||||||
chunk->next = pool->chunks;
|
chunk->next = pool->chunks;
|
||||||
|
|||||||
@@ -55,6 +55,11 @@ gu_local_pool_(uint8_t* init_buf, size_t sz);
|
|||||||
* should not be used in the bodies of recursive functions.
|
* should not be used in the bodies of recursive functions.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
/// Create a pool where each chunk is corresponds to one or
|
||||||
|
/// more pages.
|
||||||
|
GU_API_DECL GuPool*
|
||||||
|
gu_new_page_pool(void);
|
||||||
|
|
||||||
/// Create a pool stored in a memory mapped file.
|
/// Create a pool stored in a memory mapped file.
|
||||||
GU_API_DECL GuPool*
|
GU_API_DECL GuPool*
|
||||||
gu_mmap_pool(char* fpath, void* addr, size_t size, void**pptr);
|
gu_mmap_pool(char* fpath, void* addr, size_t size, void**pptr);
|
||||||
@@ -198,6 +203,9 @@ gu_mem_buf_realloc(
|
|||||||
size_t min_size,
|
size_t min_size,
|
||||||
size_t* real_size_out);
|
size_t* real_size_out);
|
||||||
|
|
||||||
|
/// Allocate enough memory pages to contain min_size bytes.
|
||||||
|
GU_API_DECL void*
|
||||||
|
gu_mem_page_alloc(size_t min_size, size_t* real_size_out);
|
||||||
|
|
||||||
/// Free a memory buffer.
|
/// Free a memory buffer.
|
||||||
GU_API_DECL void
|
GU_API_DECL void
|
||||||
|
|||||||
@@ -100,6 +100,11 @@ gu_seq_free(GuSeq* seq)
|
|||||||
gu_mem_buf_free(seq);
|
gu_mem_buf_free(seq);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
gu_dummy_finalizer(GuFinalizer* self)
|
||||||
|
{
|
||||||
|
}
|
||||||
|
|
||||||
GU_API void
|
GU_API void
|
||||||
gu_buf_require(GuBuf* buf, size_t req_len)
|
gu_buf_require(GuBuf* buf, size_t req_len)
|
||||||
{
|
{
|
||||||
@@ -110,6 +115,8 @@ gu_buf_require(GuBuf* buf, size_t req_len)
|
|||||||
size_t req_size = sizeof(GuSeq) + buf->elem_size * req_len;
|
size_t req_size = sizeof(GuSeq) + buf->elem_size * req_len;
|
||||||
size_t real_size;
|
size_t real_size;
|
||||||
|
|
||||||
|
gu_require(buf->fin.fn != gu_dummy_finalizer);
|
||||||
|
|
||||||
if (buf->seq == NULL || buf->seq == gu_empty_seq()) {
|
if (buf->seq == NULL || buf->seq == gu_empty_seq()) {
|
||||||
buf->seq = gu_mem_buf_alloc(req_size, &real_size);
|
buf->seq = gu_mem_buf_alloc(req_size, &real_size);
|
||||||
buf->seq->len = 0;
|
buf->seq->len = 0;
|
||||||
@@ -164,6 +171,24 @@ gu_buf_freeze(GuBuf* buf, GuPool* pool)
|
|||||||
return seq;
|
return seq;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
GU_API void
|
||||||
|
gu_buf_evacuate(GuBuf* buf, GuPool* pool)
|
||||||
|
{
|
||||||
|
if (buf->seq != gu_empty_seq()) {
|
||||||
|
size_t len = gu_buf_length(buf);
|
||||||
|
|
||||||
|
GuSeq* seq = gu_make_seq(buf->elem_size, len, pool);
|
||||||
|
void* bufdata = gu_buf_data(buf);
|
||||||
|
void* seqdata = gu_seq_data(seq);
|
||||||
|
memcpy(seqdata, bufdata, buf->elem_size * len);
|
||||||
|
gu_mem_buf_free(buf->seq);
|
||||||
|
|
||||||
|
buf->seq = seq;
|
||||||
|
buf->fin.fn = gu_dummy_finalizer;
|
||||||
|
buf->avail_len = len;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
GU_API void*
|
GU_API void*
|
||||||
gu_buf_insert(GuBuf* buf, size_t index)
|
gu_buf_insert(GuBuf* buf, size_t index)
|
||||||
{
|
{
|
||||||
@@ -335,13 +360,8 @@ GU_API void
|
|||||||
gu_buf_heap_pop(GuBuf *buf, GuOrder *order, void* data_out)
|
gu_buf_heap_pop(GuBuf *buf, GuOrder *order, void* data_out)
|
||||||
{
|
{
|
||||||
const void* last = gu_buf_trim(buf); // raises an error if empty
|
const void* last = gu_buf_trim(buf); // raises an error if empty
|
||||||
|
memcpy(data_out, buf->seq->data, buf->elem_size);
|
||||||
if (gu_buf_length(buf) > 0) {
|
gu_heap_siftup(buf, order, last, 0);
|
||||||
memcpy(data_out, buf->seq->data, buf->elem_size);
|
|
||||||
gu_heap_siftup(buf, order, last, 0);
|
|
||||||
} else {
|
|
||||||
memcpy(data_out, last, buf->elem_size);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
GU_API void
|
GU_API void
|
||||||
|
|||||||
@@ -182,6 +182,9 @@ gu_buf_heapify(GuBuf *buf, GuOrder *order);
|
|||||||
|
|
||||||
GU_API_DECL GuSeq*
|
GU_API_DECL GuSeq*
|
||||||
gu_buf_freeze(GuBuf* buf, GuPool* pool);
|
gu_buf_freeze(GuBuf* buf, GuPool* pool);
|
||||||
|
|
||||||
|
GU_API_DECL void
|
||||||
|
gu_buf_evacuate(GuBuf* buf, GuPool* pool);
|
||||||
#endif // GU_SEQ_H_
|
#endif // GU_SEQ_H_
|
||||||
|
|
||||||
#ifdef GU_STRING_H_
|
#ifdef GU_STRING_H_
|
||||||
|
|||||||
@@ -197,16 +197,16 @@ pgf_literal_hash(GuHash h, PgfLiteral lit);
|
|||||||
PGF_API_DECL GuHash
|
PGF_API_DECL GuHash
|
||||||
pgf_expr_hash(GuHash h, PgfExpr e);
|
pgf_expr_hash(GuHash h, PgfExpr e);
|
||||||
|
|
||||||
PGF_API size_t
|
PGF_API_DECL size_t
|
||||||
pgf_expr_size(PgfExpr expr);
|
pgf_expr_size(PgfExpr expr);
|
||||||
|
|
||||||
PGF_API GuSeq*
|
PGF_API_DECL GuSeq*
|
||||||
pgf_expr_functions(PgfExpr expr, GuPool* pool);
|
pgf_expr_functions(PgfExpr expr, GuPool* pool);
|
||||||
|
|
||||||
PGF_API PgfExpr
|
PGF_API_DECL PgfExpr
|
||||||
pgf_expr_substitute(PgfExpr expr, GuSeq* meta_values, GuPool* pool);
|
pgf_expr_substitute(PgfExpr expr, GuSeq* meta_values, GuPool* pool);
|
||||||
|
|
||||||
PGF_API PgfType*
|
PGF_API_DECL PgfType*
|
||||||
pgf_type_substitute(PgfType* type, GuSeq* meta_values, GuPool* pool);
|
pgf_type_substitute(PgfType* type, GuSeq* meta_values, GuPool* pool);
|
||||||
|
|
||||||
typedef struct PgfPrintContext PgfPrintContext;
|
typedef struct PgfPrintContext PgfPrintContext;
|
||||||
|
|||||||
@@ -5,9 +5,6 @@
|
|||||||
#include <pgf/reasoner.h>
|
#include <pgf/reasoner.h>
|
||||||
#include <pgf/reader.h>
|
#include <pgf/reader.h>
|
||||||
#include "lightning.h"
|
#include "lightning.h"
|
||||||
#if defined(__MINGW32__) || defined(_MSC_VER)
|
|
||||||
#include <malloc.h>
|
|
||||||
#endif
|
|
||||||
|
|
||||||
//#define PGF_JIT_DEBUG
|
//#define PGF_JIT_DEBUG
|
||||||
|
|
||||||
@@ -43,18 +40,6 @@ typedef struct {
|
|||||||
#define JIT_VSTATE JIT_V1
|
#define JIT_VSTATE JIT_V1
|
||||||
#define JIT_VCLOS JIT_V2
|
#define JIT_VCLOS JIT_V2
|
||||||
|
|
||||||
#if defined(__MINGW32__) || defined(_MSC_VER)
|
|
||||||
#include <windows.h>
|
|
||||||
|
|
||||||
static int
|
|
||||||
getpagesize()
|
|
||||||
{
|
|
||||||
SYSTEM_INFO system_info;
|
|
||||||
GetSystemInfo(&system_info);
|
|
||||||
return system_info.dwPageSize;
|
|
||||||
}
|
|
||||||
#endif
|
|
||||||
|
|
||||||
|
|
||||||
static void
|
static void
|
||||||
pgf_jit_finalize_page(GuFinalizer* self)
|
pgf_jit_finalize_page(GuFinalizer* self)
|
||||||
@@ -65,19 +50,8 @@ pgf_jit_finalize_page(GuFinalizer* self)
|
|||||||
static void
|
static void
|
||||||
pgf_jit_alloc_page(PgfReader* rdr)
|
pgf_jit_alloc_page(PgfReader* rdr)
|
||||||
{
|
{
|
||||||
void *page;
|
size_t page_size;
|
||||||
|
void *page = gu_mem_page_alloc(sizeof(GuFinalizer), &page_size);
|
||||||
size_t page_size = getpagesize();
|
|
||||||
|
|
||||||
#if defined(ANDROID)
|
|
||||||
if ((page = memalign(page_size, page_size)) == NULL) {
|
|
||||||
#elif defined(__MINGW32__) || defined(_MSC_VER)
|
|
||||||
if ((page = malloc(page_size)) == NULL) {
|
|
||||||
#else
|
|
||||||
if (posix_memalign(&page, page_size, page_size) != 0) {
|
|
||||||
#endif
|
|
||||||
gu_fatal("Memory allocation failed");
|
|
||||||
}
|
|
||||||
|
|
||||||
GuFinalizer* fin = page;
|
GuFinalizer* fin = page;
|
||||||
fin->fn = pgf_jit_finalize_page;
|
fin->fn = pgf_jit_finalize_page;
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
#include <pgf/data.h>
|
#include <pgf/data.h>
|
||||||
#include <pgf/expr.h>
|
#include <pgf/expr.h>
|
||||||
#include <pgf/linearizer.h>
|
|
||||||
#include <gu/enum.h>
|
#include <gu/enum.h>
|
||||||
#include <gu/seq.h>
|
#include <gu/seq.h>
|
||||||
#include <gu/assert.h>
|
#include <gu/assert.h>
|
||||||
@@ -62,14 +61,6 @@ typedef struct {
|
|||||||
|
|
||||||
typedef enum { BIND_NONE, BIND_HARD, BIND_SOFT } BIND_TYPE;
|
typedef enum { BIND_NONE, BIND_HARD, BIND_SOFT } BIND_TYPE;
|
||||||
|
|
||||||
typedef struct {
|
|
||||||
PgfProductionIdx* idx;
|
|
||||||
size_t offset;
|
|
||||||
size_t sym_idx;
|
|
||||||
} PgfLexiconIdxEntry;
|
|
||||||
|
|
||||||
typedef GuBuf PgfLexiconIdx;
|
|
||||||
|
|
||||||
struct PgfParseState {
|
struct PgfParseState {
|
||||||
PgfParseState* next;
|
PgfParseState* next;
|
||||||
|
|
||||||
@@ -82,8 +73,6 @@ struct PgfParseState {
|
|||||||
size_t end_offset;
|
size_t end_offset;
|
||||||
|
|
||||||
prob_t viterbi_prob;
|
prob_t viterbi_prob;
|
||||||
|
|
||||||
PgfLexiconIdx* lexicon_idx;
|
|
||||||
};
|
};
|
||||||
|
|
||||||
typedef struct PgfAnswers {
|
typedef struct PgfAnswers {
|
||||||
@@ -282,73 +271,13 @@ pgf_item_symbols(PgfItem* item,
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
PGF_INTERNAL void
|
||||||
pgf_print_production_args(PgfPArgs* args,
|
pgf_print_production_args(PgfPArgs* args,
|
||||||
GuOut* out, GuExn* err)
|
GuOut* out, GuExn* err);
|
||||||
{
|
|
||||||
size_t n_args = gu_seq_length(args);
|
|
||||||
for (size_t j = 0; j < n_args; j++) {
|
|
||||||
if (j > 0)
|
|
||||||
gu_putc(',',out,err);
|
|
||||||
|
|
||||||
PgfPArg arg = gu_seq_get(args, PgfPArg, j);
|
PGF_INTERNAL void
|
||||||
|
|
||||||
if (arg.hypos != NULL &&
|
|
||||||
gu_seq_length(arg.hypos) > 0) {
|
|
||||||
size_t n_hypos = gu_seq_length(arg.hypos);
|
|
||||||
for (size_t k = 0; k < n_hypos; k++) {
|
|
||||||
PgfCCat *hypo = gu_seq_get(arg.hypos, PgfCCat*, k);
|
|
||||||
pgf_print_fid(hypo->fid, out, err);
|
|
||||||
gu_putc(' ',out,err);
|
|
||||||
}
|
|
||||||
gu_puts("-> ",out,err);
|
|
||||||
}
|
|
||||||
|
|
||||||
pgf_print_fid(arg.ccat->fid, out, err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static void
|
|
||||||
pgf_print_production(int fid, PgfProduction prod,
|
pgf_print_production(int fid, PgfProduction prod,
|
||||||
GuOut *out, GuExn* err, GuPool* pool)
|
GuOut *out, GuExn* err);
|
||||||
{
|
|
||||||
pgf_print_fid(fid, out, err);
|
|
||||||
gu_puts(" -> ", out, err);
|
|
||||||
|
|
||||||
GuVariantInfo i = gu_variant_open(prod);
|
|
||||||
switch (i.tag) {
|
|
||||||
case PGF_PRODUCTION_APPLY: {
|
|
||||||
PgfProductionApply* papp = i.data;
|
|
||||||
gu_printf(out,err,"F%d(",papp->fun->funid);
|
|
||||||
if (papp->fun->ep != NULL) {
|
|
||||||
pgf_print_expr(papp->fun->ep->expr, NULL, 0, out, err);
|
|
||||||
} else {
|
|
||||||
PgfPArg* parg = gu_seq_index(papp->args, PgfPArg, 0);
|
|
||||||
gu_printf(out,err,"linref %s", parg->ccat->cnccat->abscat->name);
|
|
||||||
}
|
|
||||||
gu_printf(out,err,")[");
|
|
||||||
pgf_print_production_args(papp->args,out,err);
|
|
||||||
gu_printf(out,err,"]\n");
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case PGF_PRODUCTION_COERCE: {
|
|
||||||
PgfProductionCoerce* pcoerce = i.data;
|
|
||||||
gu_puts("_[",out,err);
|
|
||||||
pgf_print_fid(pcoerce->coerce->fid, out, err);
|
|
||||||
gu_printf("]\n",out,err);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case PGF_PRODUCTION_EXTERN: {
|
|
||||||
PgfProductionExtern* pext = i.data;
|
|
||||||
gu_printf(out,err,"<extern>(");
|
|
||||||
pgf_print_expr(pext->ep->expr, NULL, 0, out, err);
|
|
||||||
gu_printf(out,err,")[]\n");
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
gu_impossible();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static void
|
static void
|
||||||
pgf_print_item_seq(PgfItem *item,
|
pgf_print_item_seq(PgfItem *item,
|
||||||
@@ -502,39 +431,12 @@ pgf_print_expr_state0(PgfExprState* st,
|
|||||||
#endif
|
#endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
static int
|
PGF_INTERNAL_DECL int
|
||||||
cmp_string(GuString* psent, GuString tok, bool case_sensitive)
|
cmp_string(PgfCohortSpot* spot, GuString tok,
|
||||||
{
|
bool case_sensitive);
|
||||||
for (;;) {
|
|
||||||
GuUCS c2 = gu_utf8_decode((const uint8_t**) &tok);
|
|
||||||
if (c2 == 0)
|
|
||||||
return 0;
|
|
||||||
|
|
||||||
const uint8_t* p = (uint8_t*) *psent;
|
PGF_INTERNAL_DECL bool
|
||||||
GuUCS c1 = gu_utf8_decode(&p);
|
skip_space(GuString* psent, size_t* ppos);
|
||||||
if (c1 == 0)
|
|
||||||
return -1;
|
|
||||||
|
|
||||||
if (!case_sensitive)
|
|
||||||
c1 = gu_ucs_to_lower(c1);
|
|
||||||
|
|
||||||
if (c1 != c2)
|
|
||||||
return (c1-c2);
|
|
||||||
|
|
||||||
*psent = (GuString) p;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static bool
|
|
||||||
skip_space(GuString* psent)
|
|
||||||
{
|
|
||||||
const uint8_t* p = (uint8_t*) *psent;
|
|
||||||
if (!gu_ucs_is_space(gu_utf8_decode(&p)))
|
|
||||||
return false;
|
|
||||||
|
|
||||||
*psent = (GuString) p;
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
static int
|
static int
|
||||||
cmp_item_prob(GuOrder* self, const void* a, const void* b)
|
cmp_item_prob(GuOrder* self, const void* a, const void* b)
|
||||||
@@ -852,6 +754,25 @@ static void
|
|||||||
pgf_result_production(PgfParsing* ps,
|
pgf_result_production(PgfParsing* ps,
|
||||||
PgfAnswers* answers, PgfProduction prod);
|
PgfAnswers* answers, PgfProduction prod);
|
||||||
|
|
||||||
|
static void
|
||||||
|
pgf_parsing_push_item(PgfParseState* state, PgfItem* item)
|
||||||
|
{
|
||||||
|
if (gu_buf_length(state->agenda) == 0) {
|
||||||
|
state->viterbi_prob =
|
||||||
|
item->inside_prob+item->conts->outside_prob;
|
||||||
|
}
|
||||||
|
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
||||||
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
pgf_parsing_push_production(PgfParsing* ps, PgfParseState* state,
|
||||||
|
PgfItemConts* conts, PgfProduction prod)
|
||||||
|
{
|
||||||
|
PgfItem* item =
|
||||||
|
pgf_new_item(ps, conts, prod);
|
||||||
|
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
pgf_parsing_combine(PgfParsing* ps,
|
pgf_parsing_combine(PgfParsing* ps,
|
||||||
PgfParseState* before, PgfParseState* after,
|
PgfParseState* before, PgfParseState* after,
|
||||||
@@ -874,16 +795,7 @@ pgf_parsing_combine(PgfParsing* ps,
|
|||||||
}
|
}
|
||||||
|
|
||||||
pgf_item_advance(item, ps->pool);
|
pgf_item_advance(item, ps->pool);
|
||||||
gu_buf_heap_push(before->agenda, pgf_item_prob_order, &item);
|
pgf_parsing_push_item(before, item);
|
||||||
}
|
|
||||||
|
|
||||||
static void
|
|
||||||
pgf_parsing_production(PgfParsing* ps, PgfParseState* state,
|
|
||||||
PgfItemConts* conts, PgfProduction prod)
|
|
||||||
{
|
|
||||||
PgfItem* item =
|
|
||||||
pgf_new_item(ps, conts, prod);
|
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
static PgfProduction
|
static PgfProduction
|
||||||
@@ -994,7 +906,7 @@ pgf_parsing_complete(PgfParsing* ps, PgfItem* item, PgfExprProb *ep)
|
|||||||
pgf_print_fid(ccat->fid, out, err);
|
pgf_print_fid(ccat->fid, out, err);
|
||||||
gu_puts("]\n", out, err);
|
gu_puts("]\n", out, err);
|
||||||
}
|
}
|
||||||
pgf_print_production(ccat->fid, prod, out, err, tmp_pool);
|
pgf_print_production(ccat->fid, prod, out, err);
|
||||||
gu_pool_free(tmp_pool);
|
gu_pool_free(tmp_pool);
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
@@ -1019,7 +931,7 @@ pgf_parsing_complete(PgfParsing* ps, PgfItem* item, PgfExprProb *ep)
|
|||||||
* production immediately to the agenda,
|
* production immediately to the agenda,
|
||||||
* i.e. process it. */
|
* i.e. process it. */
|
||||||
if (conts2) {
|
if (conts2) {
|
||||||
pgf_parsing_production(ps, ps->before, conts2, prod);
|
pgf_parsing_push_production(ps, ps->before, conts2, prod);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1040,7 +952,7 @@ pgf_parsing_complete(PgfParsing* ps, PgfItem* item, PgfExprProb *ep)
|
|||||||
* production immediately to the agenda,
|
* production immediately to the agenda,
|
||||||
* i.e. process it. */
|
* i.e. process it. */
|
||||||
if (conts2) {
|
if (conts2) {
|
||||||
pgf_parsing_production(ps, state, conts2, prod);
|
pgf_parsing_push_production(ps, state, conts2, prod);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1060,118 +972,9 @@ pgf_parsing_complete(PgfParsing* ps, PgfItem* item, PgfExprProb *ep)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static int
|
|
||||||
pgf_symbols_cmp(GuString* psent, PgfSymbols* syms, size_t* sym_idx, bool case_sensitive)
|
|
||||||
{
|
|
||||||
size_t n_syms = gu_seq_length(syms);
|
|
||||||
while (*sym_idx < n_syms) {
|
|
||||||
PgfSymbol sym = gu_seq_get(syms, PgfSymbol, *sym_idx);
|
|
||||||
|
|
||||||
if (*sym_idx > 0) {
|
|
||||||
if (!skip_space(psent)) {
|
|
||||||
if (**psent == 0)
|
|
||||||
return -1;
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
while (**psent != 0) {
|
|
||||||
if (!skip_space(psent))
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
GuVariantInfo inf = gu_variant_open(sym);
|
|
||||||
switch (inf.tag) {
|
|
||||||
case PGF_SYMBOL_CAT:
|
|
||||||
case PGF_SYMBOL_LIT:
|
|
||||||
case PGF_SYMBOL_VAR: {
|
|
||||||
if (**psent == 0)
|
|
||||||
return -1;
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
case PGF_SYMBOL_KS: {
|
|
||||||
PgfSymbolKS* pks = inf.data;
|
|
||||||
if (**psent == 0)
|
|
||||||
return -1;
|
|
||||||
|
|
||||||
int cmp = cmp_string(psent, pks->token, case_sensitive);
|
|
||||||
if (cmp != 0)
|
|
||||||
return cmp;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case PGF_SYMBOL_KP:
|
|
||||||
case PGF_SYMBOL_BIND:
|
|
||||||
case PGF_SYMBOL_NE:
|
|
||||||
case PGF_SYMBOL_SOFT_BIND:
|
|
||||||
case PGF_SYMBOL_SOFT_SPACE:
|
|
||||||
case PGF_SYMBOL_CAPIT:
|
|
||||||
case PGF_SYMBOL_ALL_CAPIT: {
|
|
||||||
return -1;
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
gu_impossible();
|
|
||||||
}
|
|
||||||
|
|
||||||
(*sym_idx)++;
|
|
||||||
}
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
static void
|
|
||||||
pgf_parsing_lookahead(PgfParsing *ps, PgfParseState* state,
|
|
||||||
int i, int j, ptrdiff_t min, ptrdiff_t max)
|
|
||||||
{
|
|
||||||
// This is a variation of a binary search algorithm which
|
|
||||||
// can retrieve all prefixes of a string with minimal
|
|
||||||
// comparisons, i.e. there is no need to lookup every
|
|
||||||
// prefix separately.
|
|
||||||
|
|
||||||
while (i <= j) {
|
|
||||||
int k = (i+j) / 2;
|
|
||||||
PgfSequence* seq = gu_seq_index(ps->concr->sequences, PgfSequence, k);
|
|
||||||
|
|
||||||
GuString start = ps->sentence + state->end_offset;
|
|
||||||
GuString current = start;
|
|
||||||
size_t sym_idx = 0;
|
|
||||||
int cmp = pgf_symbols_cmp(¤t, seq->syms, &sym_idx, ps->case_sensitive);
|
|
||||||
if (cmp < 0) {
|
|
||||||
j = k-1;
|
|
||||||
} else if (cmp > 0) {
|
|
||||||
ptrdiff_t len = current - start;
|
|
||||||
|
|
||||||
if (min <= len)
|
|
||||||
pgf_parsing_lookahead(ps, state, i, k-1, min, len);
|
|
||||||
|
|
||||||
if (len+1 <= max)
|
|
||||||
pgf_parsing_lookahead(ps, state, k+1, j, len+1, max);
|
|
||||||
|
|
||||||
break;
|
|
||||||
} else {
|
|
||||||
ptrdiff_t len = current - start;
|
|
||||||
|
|
||||||
if (min <= len-1)
|
|
||||||
pgf_parsing_lookahead(ps, state, i, k-1, min, len-1);
|
|
||||||
|
|
||||||
if (seq->idx != NULL) {
|
|
||||||
PgfLexiconIdxEntry* entry = gu_buf_extend(state->lexicon_idx);
|
|
||||||
entry->idx = seq->idx;
|
|
||||||
entry->offset = (size_t) (current - ps->sentence);
|
|
||||||
entry->sym_idx = sym_idx;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (len+1 <= max)
|
|
||||||
pgf_parsing_lookahead(ps, state, k+1, j, len+1, max);
|
|
||||||
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
static PgfParseState*
|
static PgfParseState*
|
||||||
pgf_new_parse_state(PgfParsing* ps, size_t start_offset,
|
pgf_new_parse_state(PgfParsing* ps, size_t start_offset,
|
||||||
BIND_TYPE bind_type,
|
BIND_TYPE bind_type)
|
||||||
prob_t viterbi_prob)
|
|
||||||
{
|
{
|
||||||
PgfParseState** pstate;
|
PgfParseState** pstate;
|
||||||
if (ps->before == NULL && start_offset == 0)
|
if (ps->before == NULL && start_offset == 0)
|
||||||
@@ -1206,7 +1009,8 @@ pgf_new_parse_state(PgfParsing* ps, size_t start_offset,
|
|||||||
|
|
||||||
size_t end_offset = start_offset;
|
size_t end_offset = start_offset;
|
||||||
GuString current = ps->sentence + end_offset;
|
GuString current = ps->sentence + end_offset;
|
||||||
while (skip_space(¤t)) {
|
size_t pos = 0;
|
||||||
|
while (skip_space(¤t, &pos)) {
|
||||||
end_offset++;
|
end_offset++;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1222,43 +1026,173 @@ pgf_new_parse_state(PgfParsing* ps, size_t start_offset,
|
|||||||
(start_offset == end_offset);
|
(start_offset == end_offset);
|
||||||
state->start_offset = start_offset;
|
state->start_offset = start_offset;
|
||||||
state->end_offset = end_offset;
|
state->end_offset = end_offset;
|
||||||
state->viterbi_prob = viterbi_prob;
|
state->viterbi_prob = 0;
|
||||||
state->lexicon_idx =
|
|
||||||
gu_new_buf(PgfLexiconIdxEntry, ps->pool);
|
|
||||||
|
|
||||||
if (ps->before == NULL && start_offset == 0)
|
if (ps->before == NULL && start_offset == 0)
|
||||||
state->needs_bind = false;
|
state->needs_bind = false;
|
||||||
|
|
||||||
if (gu_seq_length(ps->concr->sequences) > 0) {
|
|
||||||
// Add epsilon lexical rules to the bottom up index
|
|
||||||
PgfSequence* seq = gu_seq_index(ps->concr->sequences, PgfSequence, 0);
|
|
||||||
if (gu_seq_length(seq->syms) == 0 && seq->idx != NULL) {
|
|
||||||
PgfLexiconIdxEntry* entry = gu_buf_extend(state->lexicon_idx);
|
|
||||||
entry->idx = seq->idx;
|
|
||||||
entry->offset = state->start_offset;
|
|
||||||
entry->sym_idx= 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add non-epsilon lexical rules to the bottom up index
|
|
||||||
if (!state->needs_bind) {
|
|
||||||
pgf_parsing_lookahead(ps, state,
|
|
||||||
0, gu_seq_length(ps->concr->sequences)-1,
|
|
||||||
1, strlen(ps->sentence)-state->end_offset);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
*pstate = state;
|
*pstate = state;
|
||||||
|
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
PGF_INTERNAL_DECL int
|
||||||
|
pgf_symbols_cmp(PgfCohortSpot* spot,
|
||||||
|
PgfSymbols* syms, size_t* sym_idx,
|
||||||
|
bool case_sensitive);
|
||||||
|
|
||||||
|
static bool
|
||||||
|
pgf_parsing_scan_helper(PgfParsing *ps, PgfParseState* state,
|
||||||
|
int i, int j, ptrdiff_t min, ptrdiff_t max)
|
||||||
|
{
|
||||||
|
// This is a variation of a binary search algorithm which
|
||||||
|
// can retrieve all prefixes of a string with minimal
|
||||||
|
// comparisons, i.e. there is no need to lookup every
|
||||||
|
// prefix separately.
|
||||||
|
|
||||||
|
bool found = false;
|
||||||
|
while (i <= j) {
|
||||||
|
int k = (i+j) / 2;
|
||||||
|
PgfSequence* seq = gu_seq_index(ps->concr->sequences, PgfSequence, k);
|
||||||
|
|
||||||
|
PgfCohortSpot start = {0, ps->sentence+state->end_offset};
|
||||||
|
PgfCohortSpot current = start;
|
||||||
|
|
||||||
|
size_t sym_idx = 0;
|
||||||
|
int cmp = pgf_symbols_cmp(¤t, seq->syms, &sym_idx, ps->case_sensitive);
|
||||||
|
if (cmp < 0) {
|
||||||
|
j = k-1;
|
||||||
|
} else if (cmp > 0) {
|
||||||
|
ptrdiff_t len = current.ptr - start.ptr;
|
||||||
|
|
||||||
|
if (min <= len)
|
||||||
|
if (pgf_parsing_scan_helper(ps, state, i, k-1, min, len))
|
||||||
|
found = true;
|
||||||
|
|
||||||
|
if (len+1 <= max)
|
||||||
|
if (pgf_parsing_scan_helper(ps, state, k+1, j, len+1, max))
|
||||||
|
found = true;
|
||||||
|
|
||||||
|
break;
|
||||||
|
} else {
|
||||||
|
ptrdiff_t len = current.ptr - start.ptr;
|
||||||
|
found = true;
|
||||||
|
|
||||||
|
if (min <= len)
|
||||||
|
pgf_parsing_scan_helper(ps, state, i, k-1, min, len);
|
||||||
|
|
||||||
|
// Here we do bottom-up prediction for all lexical categories.
|
||||||
|
// The epsilon productions will be predicted in top-down
|
||||||
|
// fashion while parsing.
|
||||||
|
if (seq->idx != NULL && len > 0) {
|
||||||
|
// A new state will mark the end of the current match
|
||||||
|
PgfParseState* new_state =
|
||||||
|
pgf_new_parse_state(ps, (size_t) (current.ptr - ps->sentence), BIND_NONE);
|
||||||
|
|
||||||
|
// Bottom-up prediction for lexical rules
|
||||||
|
size_t n_entries = gu_buf_length(seq->idx);
|
||||||
|
for (size_t i = 0; i < n_entries; i++) {
|
||||||
|
PgfProductionIdxEntry* entry =
|
||||||
|
gu_buf_index(seq->idx, PgfProductionIdxEntry, i);
|
||||||
|
|
||||||
|
PgfItemConts* conts =
|
||||||
|
pgf_parsing_get_conts(state,
|
||||||
|
entry->ccat, entry->lin_idx,
|
||||||
|
ps->pool);
|
||||||
|
|
||||||
|
// Create the new category if it doesn't exist yet
|
||||||
|
PgfCCat* tmp_ccat = pgf_parsing_get_completed(new_state, conts);
|
||||||
|
PgfCCat* ccat = tmp_ccat;
|
||||||
|
if (ccat == NULL) {
|
||||||
|
ccat = pgf_parsing_create_completed(ps, new_state, conts, INFINITY);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add the production
|
||||||
|
if (ccat->prods == NULL || ccat->n_synprods >= gu_seq_length(ccat->prods)) {
|
||||||
|
ccat->prods = gu_realloc_seq(ccat->prods, PgfProduction, ccat->n_synprods+1);
|
||||||
|
}
|
||||||
|
GuVariantInfo i;
|
||||||
|
i.tag = PGF_PRODUCTION_APPLY;
|
||||||
|
i.data = entry->papp;
|
||||||
|
PgfProduction prod = gu_variant_close(i);
|
||||||
|
gu_seq_set(ccat->prods, PgfProduction, ccat->n_synprods++, prod);
|
||||||
|
|
||||||
|
// Update the category's probability to be minimum
|
||||||
|
if (ccat->viterbi_prob > entry->papp->fun->ep->prob)
|
||||||
|
ccat->viterbi_prob = entry->papp->fun->ep->prob;
|
||||||
|
|
||||||
|
#ifdef PGF_PARSER_DEBUG
|
||||||
|
GuPool* tmp_pool = gu_new_pool();
|
||||||
|
GuOut* out = gu_file_out(stderr, tmp_pool);
|
||||||
|
GuExn* err = gu_exn(tmp_pool);
|
||||||
|
if (tmp_ccat == NULL) {
|
||||||
|
gu_printf(out, err, "[");
|
||||||
|
pgf_print_range(state, new_state, out, err);
|
||||||
|
gu_puts("; ", out, err);
|
||||||
|
pgf_print_fid(conts->ccat->fid, out, err);
|
||||||
|
gu_printf(out, err, "; %d; ",
|
||||||
|
conts->lin_idx);
|
||||||
|
pgf_print_fid(ccat->fid, out, err);
|
||||||
|
gu_puts("]\n", out, err);
|
||||||
|
}
|
||||||
|
pgf_print_production(ccat->fid, prod, out, err);
|
||||||
|
gu_pool_free(tmp_pool);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (len <= max)
|
||||||
|
pgf_parsing_scan_helper(ps, state, k+1, j, len, max);
|
||||||
|
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return found;
|
||||||
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
pgf_parsing_scan(PgfParsing *ps)
|
||||||
|
{
|
||||||
|
size_t len = strlen(ps->sentence);
|
||||||
|
|
||||||
|
PgfParseState* state =
|
||||||
|
pgf_new_parse_state(ps, 0, BIND_SOFT);
|
||||||
|
|
||||||
|
while (state != NULL) {
|
||||||
|
if (state->needs_bind) {
|
||||||
|
// We have encountered two tokens without space in between.
|
||||||
|
// Those can be accepted only if there is a BIND token
|
||||||
|
// in between. We encode this by having one more state
|
||||||
|
// at the same offset. A transition between these two
|
||||||
|
// states is possible only with the BIND token.
|
||||||
|
state =
|
||||||
|
pgf_new_parse_state(ps, state->end_offset, BIND_HARD);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!pgf_parsing_scan_helper
|
||||||
|
(ps, state,
|
||||||
|
0, gu_seq_length(ps->concr->sequences)-1,
|
||||||
|
1, len-state->end_offset)) {
|
||||||
|
// skip one character and try again
|
||||||
|
GuString s = ps->sentence+state->end_offset;
|
||||||
|
gu_utf8_decode((const uint8_t**) &s);
|
||||||
|
pgf_new_parse_state(ps, ps->sentence-s, BIND_NONE);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (state == ps->before)
|
||||||
|
state = ps->after;
|
||||||
|
else
|
||||||
|
state = state->next;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
pgf_parsing_add_transition(PgfParsing* ps, PgfToken tok, PgfItem* item)
|
pgf_parsing_add_transition(PgfParsing* ps, PgfToken tok, PgfItem* item)
|
||||||
{
|
{
|
||||||
GuString current = ps->sentence + ps->before->end_offset;
|
PgfCohortSpot current = {0, ps->sentence + ps->before->end_offset};
|
||||||
|
|
||||||
if (ps->prefix != NULL && *current == 0) {
|
if (ps->prefix != NULL && *current.ptr == 0) {
|
||||||
if (gu_string_is_prefix(ps->prefix, tok)) {
|
if (gu_string_is_prefix(ps->prefix, tok)) {
|
||||||
PgfProductionApply* papp = gu_variant_data(item->prod);
|
PgfProductionApply* papp = gu_variant_data(item->prod);
|
||||||
|
|
||||||
@@ -1271,37 +1205,15 @@ pgf_parsing_add_transition(PgfParsing* ps, PgfToken tok, PgfItem* item)
|
|||||||
} else {
|
} else {
|
||||||
if (!ps->before->needs_bind && cmp_string(¤t, tok, ps->case_sensitive) == 0) {
|
if (!ps->before->needs_bind && cmp_string(¤t, tok, ps->case_sensitive) == 0) {
|
||||||
PgfParseState* state =
|
PgfParseState* state =
|
||||||
pgf_new_parse_state(ps, (current - ps->sentence),
|
pgf_new_parse_state(ps, (current.ptr - ps->sentence),
|
||||||
BIND_NONE,
|
BIND_NONE);
|
||||||
item->inside_prob+item->conts->outside_prob);
|
pgf_parsing_push_item(state, item);
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
|
||||||
} else {
|
} else {
|
||||||
pgf_item_free(ps, item);
|
pgf_item_free(ps, item);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
|
||||||
pgf_parsing_predict_lexeme(PgfParsing* ps, PgfItemConts* conts,
|
|
||||||
PgfProductionIdxEntry* entry,
|
|
||||||
size_t offset, size_t sym_idx)
|
|
||||||
{
|
|
||||||
GuVariantInfo i = { PGF_PRODUCTION_APPLY, entry->papp };
|
|
||||||
PgfProduction prod = gu_variant_close(i);
|
|
||||||
PgfItem* item =
|
|
||||||
pgf_new_item(ps, conts, prod);
|
|
||||||
PgfSymbols* syms = entry->papp->fun->lins[conts->lin_idx]->syms;
|
|
||||||
item->sym_idx = sym_idx;
|
|
||||||
pgf_item_set_curr_symbol(item, ps->pool);
|
|
||||||
prob_t prob = item->inside_prob+item->conts->outside_prob;
|
|
||||||
PgfParseState* state =
|
|
||||||
pgf_new_parse_state(ps, offset, BIND_NONE, prob);
|
|
||||||
if (state->viterbi_prob > prob) {
|
|
||||||
state->viterbi_prob = prob;
|
|
||||||
}
|
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
|
||||||
}
|
|
||||||
|
|
||||||
static void
|
static void
|
||||||
pgf_parsing_td_predict(PgfParsing* ps,
|
pgf_parsing_td_predict(PgfParsing* ps,
|
||||||
PgfItem* item, PgfCCat* ccat, size_t lin_idx)
|
PgfItem* item, PgfCCat* ccat, size_t lin_idx)
|
||||||
@@ -1339,44 +1251,46 @@ pgf_parsing_td_predict(PgfParsing* ps,
|
|||||||
for (size_t i = 0; i < n_prods; i++) {
|
for (size_t i = 0; i < n_prods; i++) {
|
||||||
PgfProduction prod =
|
PgfProduction prod =
|
||||||
gu_seq_get(ccat->prods, PgfProduction, i);
|
gu_seq_get(ccat->prods, PgfProduction, i);
|
||||||
pgf_parsing_production(ps, ps->before, conts, prod);
|
pgf_parsing_push_production(ps, ps->before, conts, prod);
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Top-down prediction for syntactic rules
|
// Top-down prediction for syntactic rules
|
||||||
for (size_t i = 0; i < ccat->n_synprods; i++) {
|
for (size_t i = 0; i < ccat->n_synprods; i++) {
|
||||||
PgfProduction prod =
|
PgfProduction prod =
|
||||||
gu_seq_get(ccat->prods, PgfProduction, i);
|
gu_seq_get(ccat->prods, PgfProduction, i);
|
||||||
pgf_parsing_production(ps, ps->before, conts, prod);
|
pgf_parsing_push_production(ps, ps->before, conts, prod);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Bottom-up prediction for lexical and epsilon rules
|
// Top-down prediction for epsilon lexical rules if any
|
||||||
size_t n_idcs = gu_buf_length(ps->before->lexicon_idx);
|
PgfSequence* seq = gu_seq_index(ps->concr->sequences, PgfSequence, 0);
|
||||||
for (size_t i = 0; i < n_idcs; i++) {
|
if (gu_seq_length(seq->syms) == 0 && seq->idx != NULL) {
|
||||||
PgfLexiconIdxEntry* lentry =
|
|
||||||
gu_buf_index(ps->before->lexicon_idx, PgfLexiconIdxEntry, i);
|
|
||||||
|
|
||||||
PgfProductionIdxEntry key;
|
PgfProductionIdxEntry key;
|
||||||
key.ccat = ccat;
|
key.ccat = ccat;
|
||||||
key.lin_idx = lin_idx;
|
key.lin_idx = lin_idx;
|
||||||
key.papp = NULL;
|
key.papp = NULL;
|
||||||
PgfProductionIdxEntry* value =
|
PgfProductionIdxEntry* value =
|
||||||
gu_seq_binsearch(gu_buf_data_seq(lentry->idx),
|
gu_seq_binsearch(gu_buf_data_seq(seq->idx),
|
||||||
pgf_production_idx_entry_order,
|
pgf_production_idx_entry_order,
|
||||||
PgfProductionIdxEntry, &key);
|
PgfProductionIdxEntry, &key);
|
||||||
|
|
||||||
if (value != NULL) {
|
if (value != NULL) {
|
||||||
pgf_parsing_predict_lexeme(ps, conts, value, lentry->offset, lentry->sym_idx);
|
GuVariantInfo i = { PGF_PRODUCTION_APPLY, value->papp };
|
||||||
|
PgfProduction prod = gu_variant_close(i);
|
||||||
|
pgf_parsing_push_production(ps, ps->before, conts, prod);
|
||||||
|
|
||||||
PgfProductionIdxEntry* start =
|
PgfProductionIdxEntry* start =
|
||||||
gu_buf_data(lentry->idx);
|
gu_buf_data(seq->idx);
|
||||||
PgfProductionIdxEntry* end =
|
PgfProductionIdxEntry* end =
|
||||||
start + gu_buf_length(lentry->idx)-1;
|
start + gu_buf_length(seq->idx)-1;
|
||||||
|
|
||||||
PgfProductionIdxEntry* left = value-1;
|
PgfProductionIdxEntry* left = value-1;
|
||||||
while (left >= start &&
|
while (left >= start &&
|
||||||
value->ccat->fid == left->ccat->fid &&
|
value->ccat->fid == left->ccat->fid &&
|
||||||
value->lin_idx == left->lin_idx) {
|
value->lin_idx == left->lin_idx) {
|
||||||
pgf_parsing_predict_lexeme(ps, conts, left, lentry->offset, lentry->sym_idx);
|
GuVariantInfo i = { PGF_PRODUCTION_APPLY, left->papp };
|
||||||
|
PgfProduction prod = gu_variant_close(i);
|
||||||
|
pgf_parsing_push_production(ps, ps->before, conts, prod);
|
||||||
left--;
|
left--;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1384,31 +1298,32 @@ pgf_parsing_td_predict(PgfParsing* ps,
|
|||||||
while (right <= end &&
|
while (right <= end &&
|
||||||
value->ccat->fid == right->ccat->fid &&
|
value->ccat->fid == right->ccat->fid &&
|
||||||
value->lin_idx == right->lin_idx) {
|
value->lin_idx == right->lin_idx) {
|
||||||
pgf_parsing_predict_lexeme(ps, conts, right, lentry->offset, lentry->sym_idx);
|
GuVariantInfo i = { PGF_PRODUCTION_APPLY, right->papp };
|
||||||
|
PgfProduction prod = gu_variant_close(i);
|
||||||
|
pgf_parsing_push_production(ps, ps->before, conts, prod);
|
||||||
right++;
|
right++;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
}
|
||||||
/* If it has already been completed, combine. */
|
|
||||||
|
|
||||||
|
/* If the category has already been completed, combine. */
|
||||||
|
PgfCCat* completed =
|
||||||
|
pgf_parsing_get_completed(ps->before, conts);
|
||||||
|
if (completed) {
|
||||||
|
pgf_parsing_combine(ps, ps->before, ps->after, item, completed, lin_idx);
|
||||||
|
}
|
||||||
|
|
||||||
|
PgfParseState* state = ps->after;
|
||||||
|
while (state != NULL) {
|
||||||
PgfCCat* completed =
|
PgfCCat* completed =
|
||||||
pgf_parsing_get_completed(ps->before, conts);
|
pgf_parsing_get_completed(state, conts);
|
||||||
if (completed) {
|
if (completed) {
|
||||||
pgf_parsing_combine(ps, ps->before, ps->after, item, completed, lin_idx);
|
pgf_parsing_combine(ps, state, state->next, item, completed, lin_idx);
|
||||||
}
|
}
|
||||||
|
|
||||||
PgfParseState* state = ps->after;
|
state = state->next;
|
||||||
while (state != NULL) {
|
|
||||||
PgfCCat* completed =
|
|
||||||
pgf_parsing_get_completed(state, conts);
|
|
||||||
if (completed) {
|
|
||||||
pgf_parsing_combine(ps, state, state->next, item, completed, lin_idx);
|
|
||||||
}
|
|
||||||
|
|
||||||
state = state->next;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1424,7 +1339,7 @@ pgf_parsing_pre(PgfParsing* ps, PgfItem* item, PgfSymbols* syms)
|
|||||||
} else {
|
} else {
|
||||||
item->alt = 0;
|
item->alt = 0;
|
||||||
pgf_item_advance(item, ps->pool);
|
pgf_item_advance(item, ps->pool);
|
||||||
gu_buf_heap_push(ps->before->agenda, pgf_item_prob_order, &item);
|
pgf_parsing_push_item(ps->before, item);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1454,7 +1369,6 @@ pgf_parsing_symbol(PgfParsing* ps, PgfItem* item, PgfSymbol sym)
|
|||||||
case PGF_SYMBOL_KP: {
|
case PGF_SYMBOL_KP: {
|
||||||
PgfSymbolKP* skp = gu_variant_data(sym);
|
PgfSymbolKP* skp = gu_variant_data(sym);
|
||||||
|
|
||||||
PgfSymbol sym;
|
|
||||||
if (item->alt == 0) {
|
if (item->alt == 0) {
|
||||||
PgfItem* new_item;
|
PgfItem* new_item;
|
||||||
|
|
||||||
@@ -1544,9 +1458,8 @@ pgf_parsing_symbol(PgfParsing* ps, PgfItem* item, PgfSymbol sym)
|
|||||||
item->curr_sym = pgf_collect_extern_tok(ps,start,offset);
|
item->curr_sym = pgf_collect_extern_tok(ps,start,offset);
|
||||||
item->sym_idx = pgf_item_symbols_length(item);
|
item->sym_idx = pgf_item_symbols_length(item);
|
||||||
PgfParseState* state =
|
PgfParseState* state =
|
||||||
pgf_new_parse_state(ps, offset, BIND_NONE,
|
pgf_new_parse_state(ps, offset, BIND_NONE);
|
||||||
item->inside_prob+item->conts->outside_prob);
|
pgf_parsing_push_item(state, item);
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
|
||||||
match = true;
|
match = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1589,11 +1502,10 @@ pgf_parsing_symbol(PgfParsing* ps, PgfItem* item, PgfSymbol sym)
|
|||||||
if (ps->before->start_offset == ps->before->end_offset &&
|
if (ps->before->start_offset == ps->before->end_offset &&
|
||||||
ps->before->needs_bind) {
|
ps->before->needs_bind) {
|
||||||
PgfParseState* state =
|
PgfParseState* state =
|
||||||
pgf_new_parse_state(ps, ps->before->end_offset, BIND_HARD,
|
pgf_new_parse_state(ps, ps->before->end_offset, BIND_HARD);
|
||||||
item->inside_prob+item->conts->outside_prob);
|
|
||||||
if (state != NULL) {
|
if (state != NULL) {
|
||||||
pgf_item_advance(item, ps->pool);
|
pgf_item_advance(item, ps->pool);
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
pgf_parsing_push_item(state, item);
|
||||||
} else {
|
} else {
|
||||||
pgf_item_free(ps, item);
|
pgf_item_free(ps, item);
|
||||||
}
|
}
|
||||||
@@ -1607,11 +1519,10 @@ pgf_parsing_symbol(PgfParsing* ps, PgfItem* item, PgfSymbol sym)
|
|||||||
if (ps->before->start_offset == ps->before->end_offset) {
|
if (ps->before->start_offset == ps->before->end_offset) {
|
||||||
if (ps->before->needs_bind) {
|
if (ps->before->needs_bind) {
|
||||||
PgfParseState* state =
|
PgfParseState* state =
|
||||||
pgf_new_parse_state(ps, ps->before->end_offset, BIND_HARD,
|
pgf_new_parse_state(ps, ps->before->end_offset, BIND_HARD);
|
||||||
item->inside_prob+item->conts->outside_prob);
|
|
||||||
if (state != NULL) {
|
if (state != NULL) {
|
||||||
pgf_item_advance(item, ps->pool);
|
pgf_item_advance(item, ps->pool);
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
pgf_parsing_push_item(state, item);
|
||||||
} else {
|
} else {
|
||||||
pgf_item_free(ps, item);
|
pgf_item_free(ps, item);
|
||||||
}
|
}
|
||||||
@@ -1620,7 +1531,7 @@ pgf_parsing_symbol(PgfParsing* ps, PgfItem* item, PgfSymbol sym)
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
pgf_item_advance(item, ps->pool);
|
pgf_item_advance(item, ps->pool);
|
||||||
gu_buf_heap_push(ps->before->agenda, pgf_item_prob_order, &item);
|
pgf_parsing_push_item(ps->before, item);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -1722,6 +1633,9 @@ pgf_parsing_set_default_factors(PgfParsing* ps, PgfAbstr* abstr)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
PGF_INTERNAL_DECL bool
|
||||||
|
pgf_is_case_sensitive(PgfConcr* concr);
|
||||||
|
|
||||||
static PgfParsing*
|
static PgfParsing*
|
||||||
pgf_new_parsing(PgfConcr* concr, GuString sentence,
|
pgf_new_parsing(PgfConcr* concr, GuString sentence,
|
||||||
PgfCallbacksMap* callbacks, PgfOracleCallback* oracle,
|
PgfCallbacksMap* callbacks, PgfOracleCallback* oracle,
|
||||||
@@ -1732,8 +1646,7 @@ pgf_new_parsing(PgfConcr* concr, GuString sentence,
|
|||||||
ps->pool = pool;
|
ps->pool = pool;
|
||||||
ps->out_pool = out_pool;
|
ps->out_pool = out_pool;
|
||||||
ps->sentence = sentence;
|
ps->sentence = sentence;
|
||||||
ps->case_sensitive =
|
ps->case_sensitive = pgf_is_case_sensitive(concr);
|
||||||
(gu_seq_binsearch(concr->cflags, pgf_flag_order, PgfFlag, "case_sensitive") == NULL);
|
|
||||||
ps->expr_queue = gu_new_buf(PgfExprState*, pool);
|
ps->expr_queue = gu_new_buf(PgfExprState*, pool);
|
||||||
ps->max_fid = concr->total_cats;
|
ps->max_fid = concr->total_cats;
|
||||||
ps->before = NULL;
|
ps->before = NULL;
|
||||||
@@ -1942,8 +1855,7 @@ pgf_parsing_init(PgfConcr* concr, PgfCId cat,
|
|||||||
ps->heuristic_factor = heuristic_factor;
|
ps->heuristic_factor = heuristic_factor;
|
||||||
}
|
}
|
||||||
|
|
||||||
PgfParseState* state =
|
pgf_parsing_scan(ps);
|
||||||
pgf_new_parse_state(ps, 0, BIND_SOFT, 0);
|
|
||||||
|
|
||||||
int fidString = -1;
|
int fidString = -1;
|
||||||
PgfCCat* start_ccat = gu_new(PgfCCat, ps->pool);
|
PgfCCat* start_ccat = gu_new(PgfCCat, ps->pool);
|
||||||
@@ -1962,7 +1874,7 @@ pgf_parsing_init(PgfConcr* concr, PgfCId cat,
|
|||||||
#endif
|
#endif
|
||||||
|
|
||||||
PgfItemConts* conts =
|
PgfItemConts* conts =
|
||||||
pgf_parsing_get_conts(state, start_ccat, 0, ps->pool);
|
pgf_parsing_get_conts(ps->before, start_ccat, 0, ps->pool);
|
||||||
gu_buf_push(conts->items, PgfItem*, NULL);
|
gu_buf_push(conts->items, PgfItem*, NULL);
|
||||||
|
|
||||||
#ifdef PGF_COUNTS_DEBUG
|
#ifdef PGF_COUNTS_DEBUG
|
||||||
@@ -1988,7 +1900,7 @@ pgf_parsing_init(PgfConcr* concr, PgfCId cat,
|
|||||||
|
|
||||||
PgfItem* item = gu_new(PgfItem, ps->pool);
|
PgfItem* item = gu_new(PgfItem, ps->pool);
|
||||||
item->args = args;
|
item->args = args;
|
||||||
item->inside_prob += ccat->viterbi_prob;
|
item->inside_prob = 0;
|
||||||
item->conts = conts;
|
item->conts = conts;
|
||||||
item->prod = prod;
|
item->prod = prod;
|
||||||
item->curr_sym = gu_null_variant;
|
item->curr_sym = gu_null_variant;
|
||||||
@@ -2005,7 +1917,7 @@ pgf_parsing_init(PgfConcr* concr, PgfCId cat,
|
|||||||
ps->item_real_count++;
|
ps->item_real_count++;
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
gu_buf_heap_push(state->agenda, pgf_item_prob_order, &item);
|
gu_buf_heap_push(ps->before->agenda, pgf_item_prob_order, &item);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -2023,10 +1935,18 @@ pgf_parsing_proceed(PgfParsing* ps)
|
|||||||
best_prob = gu_buf_get(ps->expr_queue, PgfExprState*, 0)->ep.prob;
|
best_prob = gu_buf_get(ps->expr_queue, PgfExprState*, 0)->ep.prob;
|
||||||
}
|
}
|
||||||
|
|
||||||
prob_t delta_prob = 0;
|
PgfParseState* st = ps->before;
|
||||||
PgfParseState* st = ps->before;
|
PgfParseState* last = NULL;
|
||||||
|
prob_t delta_prob = 0;
|
||||||
while (st != NULL) {
|
while (st != NULL) {
|
||||||
if (gu_buf_length(st->agenda) > 0) {
|
if (gu_buf_length(st->agenda) > 0) {
|
||||||
|
if (last != NULL) {
|
||||||
|
delta_prob +=
|
||||||
|
(last->viterbi_prob-st->viterbi_prob) *
|
||||||
|
ps->heuristic_factor;
|
||||||
|
}
|
||||||
|
last = st;
|
||||||
|
|
||||||
PgfItem* item = gu_buf_get(st->agenda, PgfItem*, 0);
|
PgfItem* item = gu_buf_get(st->agenda, PgfItem*, 0);
|
||||||
prob_t item_prob =
|
prob_t item_prob =
|
||||||
item->inside_prob+item->conts->outside_prob+delta_prob;
|
item->inside_prob+item->conts->outside_prob+delta_prob;
|
||||||
@@ -2044,10 +1964,6 @@ pgf_parsing_proceed(PgfParsing* ps)
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
prob_t state_delta =
|
|
||||||
(st->viterbi_prob-(st->next ? st->next->viterbi_prob : 0))*
|
|
||||||
ps->heuristic_factor;
|
|
||||||
delta_prob += state_delta;
|
|
||||||
st = st->next;
|
st = st->next;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -2345,173 +2261,6 @@ pgf_complete(PgfConcr* concr, PgfType* type, GuString sentence,
|
|||||||
return &ps->en;
|
return &ps->en;
|
||||||
}
|
}
|
||||||
|
|
||||||
static void
|
|
||||||
pgf_morpho_iter(PgfProductionIdx* idx,
|
|
||||||
PgfMorphoCallback* callback,
|
|
||||||
GuExn* err)
|
|
||||||
{
|
|
||||||
size_t n_entries = gu_buf_length(idx);
|
|
||||||
for (size_t i = 0; i < n_entries; i++) {
|
|
||||||
PgfProductionIdxEntry* entry =
|
|
||||||
gu_buf_index(idx, PgfProductionIdxEntry, i);
|
|
||||||
|
|
||||||
PgfCId lemma = entry->papp->fun->absfun->name;
|
|
||||||
GuString analysis = entry->ccat->cnccat->labels[entry->lin_idx];
|
|
||||||
|
|
||||||
prob_t prob = entry->ccat->cnccat->abscat->prob +
|
|
||||||
entry->papp->fun->absfun->ep.prob;
|
|
||||||
callback->callback(callback,
|
|
||||||
lemma, analysis, prob, err);
|
|
||||||
if (!gu_ok(err))
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
typedef struct {
|
|
||||||
GuOrder order;
|
|
||||||
bool case_sensitive;
|
|
||||||
} PgfSequenceOrder;
|
|
||||||
|
|
||||||
static int
|
|
||||||
pgf_sequence_cmp_fn(GuOrder* order, const void* p1, const void* p2)
|
|
||||||
{
|
|
||||||
PgfSequenceOrder* self = gu_container(order, PgfSequenceOrder, order);
|
|
||||||
GuString sent = (GuString) p1;
|
|
||||||
const PgfSequence* sp2 = p2;
|
|
||||||
|
|
||||||
size_t sym_idx = 0;
|
|
||||||
int res = pgf_symbols_cmp(&sent, sp2->syms, &sym_idx, self->case_sensitive);
|
|
||||||
if (res == 0 && (*sent != 0 || sym_idx != gu_seq_length(sp2->syms))) {
|
|
||||||
res = 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
PGF_API void
|
|
||||||
pgf_lookup_morpho(PgfConcr *concr, GuString sentence,
|
|
||||||
PgfMorphoCallback* callback, GuExn* err)
|
|
||||||
{
|
|
||||||
if (concr->sequences == NULL) {
|
|
||||||
GuExnData* err_data = gu_raise(err, PgfExn);
|
|
||||||
if (err_data) {
|
|
||||||
err_data->data = "The concrete syntax is not loaded";
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bool case_sensitive =
|
|
||||||
(gu_seq_binsearch(concr->cflags, pgf_flag_order, PgfFlag, "case_sensitive") == NULL);
|
|
||||||
|
|
||||||
PgfSequenceOrder order = { { pgf_sequence_cmp_fn }, case_sensitive };
|
|
||||||
PgfSequence* seq = (PgfSequence*)
|
|
||||||
gu_seq_binsearch(concr->sequences, &order.order,
|
|
||||||
PgfSequence, (void*) sentence);
|
|
||||||
|
|
||||||
if (seq != NULL && seq->idx != NULL)
|
|
||||||
pgf_morpho_iter(seq->idx, callback, err);
|
|
||||||
}
|
|
||||||
|
|
||||||
typedef struct {
|
|
||||||
GuEnum en;
|
|
||||||
PgfSequences* sequences;
|
|
||||||
GuString prefix;
|
|
||||||
size_t seq_idx;
|
|
||||||
} PgfFullFormState;
|
|
||||||
|
|
||||||
struct PgfFullFormEntry {
|
|
||||||
GuString tokens;
|
|
||||||
PgfProductionIdx* idx;
|
|
||||||
};
|
|
||||||
|
|
||||||
static void
|
|
||||||
gu_fullform_enum_next(GuEnum* self, void* to, GuPool* pool)
|
|
||||||
{
|
|
||||||
PgfFullFormState* st = gu_container(self, PgfFullFormState, en);
|
|
||||||
PgfFullFormEntry* entry = NULL;
|
|
||||||
|
|
||||||
if (st->sequences != NULL) {
|
|
||||||
size_t n_seqs = gu_seq_length(st->sequences);
|
|
||||||
while (st->seq_idx < n_seqs) {
|
|
||||||
PgfSequence* seq = gu_seq_index(st->sequences, PgfSequence, st->seq_idx);
|
|
||||||
GuString tokens = pgf_get_tokens(seq->syms, 0, pool);
|
|
||||||
|
|
||||||
if (!gu_string_is_prefix(st->prefix, tokens)) {
|
|
||||||
st->seq_idx = n_seqs;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (*tokens != 0 && seq->idx != NULL) {
|
|
||||||
entry = gu_new(PgfFullFormEntry, pool);
|
|
||||||
entry->tokens = tokens;
|
|
||||||
entry->idx = seq->idx;
|
|
||||||
|
|
||||||
st->seq_idx++;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
st->seq_idx++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
*((PgfFullFormEntry**) to) = entry;
|
|
||||||
}
|
|
||||||
|
|
||||||
PGF_API GuEnum*
|
|
||||||
pgf_fullform_lexicon(PgfConcr *concr, GuPool* pool)
|
|
||||||
{
|
|
||||||
PgfFullFormState* st = gu_new(PgfFullFormState, pool);
|
|
||||||
st->en.next = gu_fullform_enum_next;
|
|
||||||
st->sequences = concr->sequences;
|
|
||||||
st->prefix = "";
|
|
||||||
st->seq_idx = 0;
|
|
||||||
return &st->en;
|
|
||||||
}
|
|
||||||
|
|
||||||
PGF_API GuString
|
|
||||||
pgf_fullform_get_string(PgfFullFormEntry* entry)
|
|
||||||
{
|
|
||||||
return entry->tokens;
|
|
||||||
}
|
|
||||||
|
|
||||||
PGF_API void
|
|
||||||
pgf_fullform_get_analyses(PgfFullFormEntry* entry,
|
|
||||||
PgfMorphoCallback* callback, GuExn* err)
|
|
||||||
{
|
|
||||||
pgf_morpho_iter(entry->idx, callback, err);
|
|
||||||
}
|
|
||||||
|
|
||||||
PGF_API GuEnum*
|
|
||||||
pgf_lookup_word_prefix(PgfConcr *concr, GuString prefix,
|
|
||||||
GuPool* pool, GuExn* err)
|
|
||||||
{
|
|
||||||
if (concr->sequences == NULL) {
|
|
||||||
GuExnData* err_data = gu_raise(err, PgfExn);
|
|
||||||
if (err_data) {
|
|
||||||
err_data->data = "The concrete syntax is not loaded";
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
PgfFullFormState* state = gu_new(PgfFullFormState, pool);
|
|
||||||
state->en.next = gu_fullform_enum_next;
|
|
||||||
state->sequences = concr->sequences;
|
|
||||||
state->prefix = prefix;
|
|
||||||
state->seq_idx = 0;
|
|
||||||
|
|
||||||
bool case_sensitive =
|
|
||||||
(gu_seq_binsearch(concr->cflags, pgf_flag_order, PgfFlag, "case_sensitive") == NULL);
|
|
||||||
|
|
||||||
PgfSequenceOrder order = { { pgf_sequence_cmp_fn }, case_sensitive };
|
|
||||||
if (!gu_seq_binsearch_index(concr->sequences, &order.order,
|
|
||||||
PgfSequence, (void*) prefix,
|
|
||||||
&state->seq_idx)) {
|
|
||||||
state->seq_idx++;
|
|
||||||
}
|
|
||||||
|
|
||||||
return &state->en;
|
|
||||||
}
|
|
||||||
|
|
||||||
PGF_API void
|
PGF_API void
|
||||||
pgf_parser_index(PgfConcr* concr,
|
pgf_parser_index(PgfConcr* concr,
|
||||||
PgfCCat* ccat, PgfProduction prod,
|
PgfCCat* ccat, PgfProduction prod,
|
||||||
|
|||||||
@@ -167,6 +167,22 @@ PGF_API_DECL void
|
|||||||
pgf_lookup_morpho(PgfConcr *concr, GuString sentence,
|
pgf_lookup_morpho(PgfConcr *concr, GuString sentence,
|
||||||
PgfMorphoCallback* callback, GuExn* err);
|
PgfMorphoCallback* callback, GuExn* err);
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
size_t pos;
|
||||||
|
GuString ptr;
|
||||||
|
} PgfCohortSpot;
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
PgfCohortSpot start;
|
||||||
|
PgfCohortSpot end;
|
||||||
|
GuBuf* buf;
|
||||||
|
} PgfCohortRange;
|
||||||
|
|
||||||
|
PGF_API_DECL GuEnum*
|
||||||
|
pgf_lookup_cohorts(PgfConcr *concr, GuString sentence,
|
||||||
|
PgfMorphoCallback* callback,
|
||||||
|
GuPool* pool, GuExn* err);
|
||||||
|
|
||||||
typedef struct PgfFullFormEntry PgfFullFormEntry;
|
typedef struct PgfFullFormEntry PgfFullFormEntry;
|
||||||
|
|
||||||
PGF_API_DECL GuEnum*
|
PGF_API_DECL GuEnum*
|
||||||
|
|||||||
@@ -94,6 +94,74 @@ pgf_print_fid(int fid, GuOut* out, GuExn* err)
|
|||||||
gu_printf(out, err, "C%d", fid);
|
gu_printf(out, err, "C%d", fid);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
PGF_INTERNAL void
|
||||||
|
pgf_print_production_args(PgfPArgs* args,
|
||||||
|
GuOut* out, GuExn* err)
|
||||||
|
{
|
||||||
|
size_t n_args = gu_seq_length(args);
|
||||||
|
for (size_t j = 0; j < n_args; j++) {
|
||||||
|
if (j > 0)
|
||||||
|
gu_putc(',',out,err);
|
||||||
|
|
||||||
|
PgfPArg arg = gu_seq_get(args, PgfPArg, j);
|
||||||
|
|
||||||
|
if (arg.hypos != NULL &&
|
||||||
|
gu_seq_length(arg.hypos) > 0) {
|
||||||
|
size_t n_hypos = gu_seq_length(arg.hypos);
|
||||||
|
for (size_t k = 0; k < n_hypos; k++) {
|
||||||
|
PgfCCat *hypo = gu_seq_get(arg.hypos, PgfCCat*, k);
|
||||||
|
pgf_print_fid(hypo->fid, out, err);
|
||||||
|
gu_putc(' ',out,err);
|
||||||
|
}
|
||||||
|
gu_puts("-> ",out,err);
|
||||||
|
}
|
||||||
|
|
||||||
|
pgf_print_fid(arg.ccat->fid, out, err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
PGF_INTERNAL void
|
||||||
|
pgf_print_production(int fid, PgfProduction prod,
|
||||||
|
GuOut *out, GuExn* err)
|
||||||
|
{
|
||||||
|
pgf_print_fid(fid, out, err);
|
||||||
|
gu_puts(" -> ", out, err);
|
||||||
|
|
||||||
|
GuVariantInfo i = gu_variant_open(prod);
|
||||||
|
switch (i.tag) {
|
||||||
|
case PGF_PRODUCTION_APPLY: {
|
||||||
|
PgfProductionApply* papp = i.data;
|
||||||
|
gu_printf(out,err,"F%d(",papp->fun->funid);
|
||||||
|
if (papp->fun->ep != NULL) {
|
||||||
|
pgf_print_expr(papp->fun->ep->expr, NULL, 0, out, err);
|
||||||
|
} else {
|
||||||
|
PgfPArg* parg = gu_seq_index(papp->args, PgfPArg, 0);
|
||||||
|
gu_printf(out,err,"linref %s", parg->ccat->cnccat->abscat->name);
|
||||||
|
}
|
||||||
|
gu_printf(out,err,")[");
|
||||||
|
pgf_print_production_args(papp->args,out,err);
|
||||||
|
gu_printf(out,err,"]\n");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case PGF_PRODUCTION_COERCE: {
|
||||||
|
PgfProductionCoerce* pcoerce = i.data;
|
||||||
|
gu_puts("_[",out,err);
|
||||||
|
pgf_print_fid(pcoerce->coerce->fid, out, err);
|
||||||
|
gu_puts("]\n",out,err);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case PGF_PRODUCTION_EXTERN: {
|
||||||
|
PgfProductionExtern* pext = i.data;
|
||||||
|
gu_printf(out,err,"<extern>(");
|
||||||
|
pgf_print_expr(pext->ep->expr, NULL, 0, out, err);
|
||||||
|
gu_printf(out,err,")[]\n");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
gu_impossible();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
static void
|
static void
|
||||||
pgf_print_productions(GuMapItor* fn, const void* key, void* value,
|
pgf_print_productions(GuMapItor* fn, const void* key, void* value,
|
||||||
GuExn* err)
|
GuExn* err)
|
||||||
@@ -107,48 +175,7 @@ pgf_print_productions(GuMapItor* fn, const void* key, void* value,
|
|||||||
size_t n_prods = gu_seq_length(ccat->prods);
|
size_t n_prods = gu_seq_length(ccat->prods);
|
||||||
for (size_t i = 0; i < n_prods; i++) {
|
for (size_t i = 0; i < n_prods; i++) {
|
||||||
PgfProduction prod = gu_seq_get(ccat->prods, PgfProduction, i);
|
PgfProduction prod = gu_seq_get(ccat->prods, PgfProduction, i);
|
||||||
|
pgf_print_production(fid, prod, out, err);
|
||||||
gu_puts(" ", out, err);
|
|
||||||
pgf_print_fid(fid, out, err);
|
|
||||||
gu_puts(" -> ", out, err);
|
|
||||||
|
|
||||||
GuVariantInfo i = gu_variant_open(prod);
|
|
||||||
switch (i.tag) {
|
|
||||||
case PGF_PRODUCTION_APPLY: {
|
|
||||||
PgfProductionApply* papp = i.data;
|
|
||||||
gu_printf(out,err,"F%d[",papp->fun->funid);
|
|
||||||
size_t n_args = gu_seq_length(papp->args);
|
|
||||||
for (size_t j = 0; j < n_args; j++) {
|
|
||||||
if (j > 0)
|
|
||||||
gu_putc(',',out,err);
|
|
||||||
|
|
||||||
PgfPArg arg = gu_seq_get(papp->args, PgfPArg, j);
|
|
||||||
|
|
||||||
if (arg.hypos != NULL) {
|
|
||||||
size_t n_hypos = gu_seq_length(arg.hypos);
|
|
||||||
for (size_t k = 0; k < n_hypos; k++) {
|
|
||||||
if (k > 0)
|
|
||||||
gu_putc(' ',out,err);
|
|
||||||
PgfCCat *hypo = gu_seq_get(arg.hypos, PgfCCat*, k);
|
|
||||||
pgf_print_fid(hypo->fid, out, err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pgf_print_fid(arg.ccat->fid, out, err);
|
|
||||||
}
|
|
||||||
gu_printf(out,err,"]\n");
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
case PGF_PRODUCTION_COERCE: {
|
|
||||||
PgfProductionCoerce* pcoerce = i.data;
|
|
||||||
gu_puts("_[", out, err);
|
|
||||||
pgf_print_fid(pcoerce->coerce->fid, out, err);
|
|
||||||
gu_puts("]\n", out, err);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
gu_impossible();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -328,16 +328,20 @@ pgf_read_patt(PgfReader* rdr)
|
|||||||
uint8_t tag = pgf_read_tag(rdr);
|
uint8_t tag = pgf_read_tag(rdr);
|
||||||
switch (tag) {
|
switch (tag) {
|
||||||
case PGF_PATT_APP: {
|
case PGF_PATT_APP: {
|
||||||
PgfPattApp *papp =
|
PgfCId ctor = pgf_read_cid(rdr, rdr->opool);
|
||||||
gu_new_variant(PGF_PATT_APP,
|
|
||||||
PgfPattApp,
|
|
||||||
&patt, rdr->opool);
|
|
||||||
papp->ctor = pgf_read_cid(rdr, rdr->opool);
|
|
||||||
gu_return_on_exn(rdr->err, gu_null_variant);
|
gu_return_on_exn(rdr->err, gu_null_variant);
|
||||||
|
|
||||||
papp->n_args = pgf_read_len(rdr);
|
size_t n_args = pgf_read_len(rdr);
|
||||||
gu_return_on_exn(rdr->err, gu_null_variant);
|
gu_return_on_exn(rdr->err, gu_null_variant);
|
||||||
|
|
||||||
|
PgfPattApp *papp =
|
||||||
|
gu_new_flex_variant(PGF_PATT_APP,
|
||||||
|
PgfPattApp,
|
||||||
|
args, n_args,
|
||||||
|
&patt, rdr->opool);
|
||||||
|
papp->ctor = ctor;
|
||||||
|
papp->n_args = n_args;
|
||||||
|
|
||||||
for (size_t i = 0; i < papp->n_args; i++) {
|
for (size_t i = 0; i < papp->n_args; i++) {
|
||||||
papp->args[i] = pgf_read_patt(rdr);
|
papp->args[i] = pgf_read_patt(rdr);
|
||||||
gu_return_on_exn(rdr->err, gu_null_variant);
|
gu_return_on_exn(rdr->err, gu_null_variant);
|
||||||
|
|||||||
516
src/runtime/c/pgf/scanner.c
Normal file
516
src/runtime/c/pgf/scanner.c
Normal file
@@ -0,0 +1,516 @@
|
|||||||
|
#include <pgf/data.h>
|
||||||
|
#include <pgf/expr.h>
|
||||||
|
#include <pgf/linearizer.h>
|
||||||
|
#include <gu/utf8.h>
|
||||||
|
|
||||||
|
PGF_INTERNAL int
|
||||||
|
cmp_string(PgfCohortSpot* spot, GuString tok,
|
||||||
|
bool case_sensitive)
|
||||||
|
{
|
||||||
|
for (;;) {
|
||||||
|
GuUCS c2 = gu_utf8_decode((const uint8_t**) &tok);
|
||||||
|
if (c2 == 0)
|
||||||
|
return 0;
|
||||||
|
|
||||||
|
const uint8_t* p = (uint8_t*) spot->ptr;
|
||||||
|
GuUCS c1 = gu_utf8_decode(&p);
|
||||||
|
if (c1 == 0)
|
||||||
|
return -1;
|
||||||
|
|
||||||
|
if (!case_sensitive) {
|
||||||
|
c1 = gu_ucs_to_lower(c1);
|
||||||
|
c2 = gu_ucs_to_lower(c2);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (c1 != c2)
|
||||||
|
return (c1-c2);
|
||||||
|
|
||||||
|
spot->ptr = (GuString) p;
|
||||||
|
spot->pos++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
PGF_INTERNAL bool
|
||||||
|
skip_space(GuString* psent, size_t* ppos)
|
||||||
|
{
|
||||||
|
const uint8_t* p = (uint8_t*) *psent;
|
||||||
|
if (!gu_ucs_is_space(gu_utf8_decode(&p)))
|
||||||
|
return false;
|
||||||
|
|
||||||
|
*psent = (GuString) p;
|
||||||
|
(*ppos)++;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
PGF_INTERNAL int
|
||||||
|
pgf_symbols_cmp(PgfCohortSpot* spot,
|
||||||
|
PgfSymbols* syms, size_t* sym_idx,
|
||||||
|
bool case_sensitive)
|
||||||
|
{
|
||||||
|
size_t n_syms = gu_seq_length(syms);
|
||||||
|
while (*sym_idx < n_syms) {
|
||||||
|
PgfSymbol sym = gu_seq_get(syms, PgfSymbol, *sym_idx);
|
||||||
|
|
||||||
|
if (*sym_idx > 0) {
|
||||||
|
if (!skip_space(&spot->ptr,&spot->pos)) {
|
||||||
|
if (*spot->ptr == 0)
|
||||||
|
return -1;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
while (*spot->ptr != 0) {
|
||||||
|
if (!skip_space(&spot->ptr,&spot->pos))
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
GuVariantInfo inf = gu_variant_open(sym);
|
||||||
|
switch (inf.tag) {
|
||||||
|
case PGF_SYMBOL_CAT:
|
||||||
|
case PGF_SYMBOL_LIT:
|
||||||
|
case PGF_SYMBOL_VAR: {
|
||||||
|
if (*spot->ptr == 0)
|
||||||
|
return -1;
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
case PGF_SYMBOL_KS: {
|
||||||
|
PgfSymbolKS* pks = inf.data;
|
||||||
|
if (*spot->ptr == 0)
|
||||||
|
return -1;
|
||||||
|
|
||||||
|
int cmp = cmp_string(spot,pks->token, case_sensitive);
|
||||||
|
if (cmp != 0)
|
||||||
|
return cmp;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
case PGF_SYMBOL_KP:
|
||||||
|
case PGF_SYMBOL_BIND:
|
||||||
|
case PGF_SYMBOL_NE:
|
||||||
|
case PGF_SYMBOL_SOFT_BIND:
|
||||||
|
case PGF_SYMBOL_SOFT_SPACE:
|
||||||
|
case PGF_SYMBOL_CAPIT:
|
||||||
|
case PGF_SYMBOL_ALL_CAPIT: {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
default:
|
||||||
|
gu_impossible();
|
||||||
|
}
|
||||||
|
|
||||||
|
(*sym_idx)++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
static void
|
||||||
|
pgf_morpho_iter(PgfProductionIdx* idx,
|
||||||
|
PgfMorphoCallback* callback,
|
||||||
|
GuExn* err)
|
||||||
|
{
|
||||||
|
size_t n_entries = gu_buf_length(idx);
|
||||||
|
for (size_t i = 0; i < n_entries; i++) {
|
||||||
|
PgfProductionIdxEntry* entry =
|
||||||
|
gu_buf_index(idx, PgfProductionIdxEntry, i);
|
||||||
|
|
||||||
|
PgfCId lemma = entry->papp->fun->absfun->name;
|
||||||
|
GuString analysis = entry->ccat->cnccat->labels[entry->lin_idx];
|
||||||
|
|
||||||
|
prob_t prob = entry->ccat->cnccat->abscat->prob +
|
||||||
|
entry->papp->fun->absfun->ep.prob;
|
||||||
|
callback->callback(callback,
|
||||||
|
lemma, analysis, prob, err);
|
||||||
|
if (!gu_ok(err))
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
GuOrder order;
|
||||||
|
bool case_sensitive;
|
||||||
|
} PgfSequenceOrder;
|
||||||
|
|
||||||
|
PGF_INTERNAL bool
|
||||||
|
pgf_is_case_sensitive(PgfConcr* concr)
|
||||||
|
{
|
||||||
|
PgfFlag* flag =
|
||||||
|
gu_seq_binsearch(concr->cflags, pgf_flag_order, PgfFlag, "case_sensitive");
|
||||||
|
if (flag != NULL) {
|
||||||
|
GuVariantInfo inf = gu_variant_open(flag->value);
|
||||||
|
if (inf.tag == PGF_LITERAL_STR) {
|
||||||
|
PgfLiteralStr* lstr = inf.data;
|
||||||
|
if (strcmp(lstr->val, "off") == 0)
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
static int
|
||||||
|
pgf_sequence_cmp_fn(GuOrder* order, const void* p1, const void* p2)
|
||||||
|
{
|
||||||
|
PgfSequenceOrder* self = gu_container(order, PgfSequenceOrder, order);
|
||||||
|
|
||||||
|
PgfCohortSpot spot = {0, (GuString) p1};
|
||||||
|
|
||||||
|
const PgfSequence* sp2 = p2;
|
||||||
|
|
||||||
|
size_t sym_idx = 0;
|
||||||
|
int res = pgf_symbols_cmp(&spot, sp2->syms, &sym_idx, self->case_sensitive);
|
||||||
|
if (res == 0 && (*spot.ptr != 0 || sym_idx != gu_seq_length(sp2->syms))) {
|
||||||
|
res = 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return res;
|
||||||
|
}
|
||||||
|
|
||||||
|
PGF_API void
|
||||||
|
pgf_lookup_morpho(PgfConcr *concr, GuString sentence,
|
||||||
|
PgfMorphoCallback* callback, GuExn* err)
|
||||||
|
{
|
||||||
|
if (concr->sequences == NULL) {
|
||||||
|
GuExnData* err_data = gu_raise(err, PgfExn);
|
||||||
|
if (err_data) {
|
||||||
|
err_data->data = "The concrete syntax is not loaded";
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t index = 0;
|
||||||
|
PgfSequenceOrder order = { { pgf_sequence_cmp_fn },
|
||||||
|
pgf_is_case_sensitive(concr) };
|
||||||
|
if (gu_seq_binsearch_index(concr->sequences, &order.order,
|
||||||
|
PgfSequence, (void*) sentence,
|
||||||
|
&index)) {
|
||||||
|
PgfSequence* seq = NULL;
|
||||||
|
|
||||||
|
/* If the match is case-insensitive then there might be more
|
||||||
|
* matches around the current index. We must check the neighbour
|
||||||
|
* sequences for matching as well.
|
||||||
|
*/
|
||||||
|
|
||||||
|
if (!order.case_sensitive) {
|
||||||
|
size_t i = index;
|
||||||
|
while (i > 0) {
|
||||||
|
seq = gu_seq_index(concr->sequences, PgfSequence, i-1);
|
||||||
|
|
||||||
|
size_t sym_idx = 0;
|
||||||
|
PgfCohortSpot spot = {0, sentence};
|
||||||
|
if (pgf_symbols_cmp(&spot, seq->syms, &sym_idx, order.case_sensitive) != 0) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (seq->idx != NULL)
|
||||||
|
pgf_morpho_iter(seq->idx, callback, err);
|
||||||
|
|
||||||
|
i--;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
seq = gu_seq_index(concr->sequences, PgfSequence, index);
|
||||||
|
if (seq->idx != NULL)
|
||||||
|
pgf_morpho_iter(seq->idx, callback, err);
|
||||||
|
|
||||||
|
if (!order.case_sensitive) {
|
||||||
|
size_t i = index+1;
|
||||||
|
while (i < gu_seq_length(concr->sequences)) {
|
||||||
|
seq = gu_seq_index(concr->sequences, PgfSequence, i);
|
||||||
|
|
||||||
|
size_t sym_idx = 0;
|
||||||
|
PgfCohortSpot spot = {0, sentence};
|
||||||
|
if (pgf_symbols_cmp(&spot, seq->syms, &sym_idx, order.case_sensitive) != 0) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (seq->idx != NULL)
|
||||||
|
pgf_morpho_iter(seq->idx, callback, err);
|
||||||
|
|
||||||
|
i++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
typedef struct {
|
||||||
|
GuEnum en;
|
||||||
|
PgfConcr* concr;
|
||||||
|
GuString sentence;
|
||||||
|
GuString current;
|
||||||
|
size_t len;
|
||||||
|
PgfMorphoCallback* callback;
|
||||||
|
GuExn* err;
|
||||||
|
bool case_sensitive;
|
||||||
|
GuBuf* spots;
|
||||||
|
GuBuf* found;
|
||||||
|
} PgfCohortsState;
|
||||||
|
|
||||||
|
static int
|
||||||
|
cmp_cohort_spot(GuOrder* self, const void* a, const void* b)
|
||||||
|
{
|
||||||
|
PgfCohortSpot *s1 = (PgfCohortSpot *) a;
|
||||||
|
PgfCohortSpot *s2 = (PgfCohortSpot *) b;
|
||||||
|
|
||||||
|
return (s1->ptr-s2->ptr);
|
||||||
|
}
|
||||||
|
|
||||||
|
static GuOrder
|
||||||
|
pgf_cohort_spot_order[1] = {{ cmp_cohort_spot }};
|
||||||
|
|
||||||
|
// Searches the sorted sequence table for every sequence that is a prefix
// of the sentence starting at *spot. [i,j] is the index window into
// state->concr->sequences; [min,max] bounds (in bytes) the prefix lengths
// still worth considering. Matches are pushed onto state->found, and the
// position just past each match (after skipping spaces) is pushed onto
// the state->spots heap as a future cohort start.
// NOTE(review): `&current` below was mojibake ("¤t") in the scraped
// source; restored from the surrounding declarations.
static void
pgf_lookup_cohorts_helper(PgfCohortsState *state, PgfCohortSpot* spot,
                          int i, int j, ptrdiff_t min, ptrdiff_t max)
{
	// This is a variation of a binary search algorithm which
	// can retrieve all prefixes of a string with minimal
	// comparisons, i.e. there is no need to lookup every
	// prefix separately.

	while (i <= j) {
		int k = (i+j) / 2;
		PgfSequence* seq = gu_seq_index(state->concr->sequences, PgfSequence, k);

		// Copy the spot: pgf_symbols_cmp advances current.ptr/pos as it
		// consumes matching input.
		PgfCohortSpot current = *spot;

		size_t sym_idx = 0;
		int cmp = pgf_symbols_cmp(&current, seq->syms, &sym_idx, state->case_sensitive);
		if (cmp < 0) {
			// sentence < sequence: continue in the lower half
			j = k-1;
		} else if (cmp > 0) {
			// sentence > sequence after `len` matching bytes:
			// shorter prefixes live below k, longer ones above.
			ptrdiff_t len = current.ptr - spot->ptr;

			if (min <= len)
				pgf_lookup_cohorts_helper(state, spot, i, k-1, min, len);

			if (len+1 <= max)
				pgf_lookup_cohorts_helper(state, spot, k+1, j, len+1, max);

			break;
		} else {
			// Exact prefix match of length `len` at index k.
			ptrdiff_t len = current.ptr - spot->ptr;

			// Shorter prefixes may still match below k.
			if (min <= len)
				pgf_lookup_cohorts_helper(state, spot, i, k-1, min, len);

			// Record the match only if the sequence has analyses.
			if (seq->idx != NULL && gu_buf_length(seq->idx) > 0) {
				PgfCohortRange* range = gu_buf_insert(state->found, 0);
				range->start = *spot;
				range->end = current;
				range->buf = seq->idx;
			}

			// Advance past any whitespace to the next potential
			// cohort start and schedule it on the heap.
			while (*current.ptr != 0) {
				if (!skip_space(&current.ptr, &current.pos))
					break;
			}

			gu_buf_heap_push(state->spots, pgf_cohort_spot_order, &current);

			// Equal-or-longer prefixes may match above k.
			if (len <= max)
				pgf_lookup_cohorts_helper(state, spot, k+1, j, len, max);

			break;
		}
	}
}
|
||||||
|
|
||||||
|
// GuEnum 'next' implementation for pgf_lookup_cohorts(). Pops pending
// start positions from the spots heap, searches for cohorts starting
// there, reports each match's analyses through state->callback, and
// writes one PgfCohortRange to `to`. End of enumeration is signalled by
// a range whose pointers are all NULL.
static void
pgf_lookup_cohorts_enum_next(GuEnum* self, void* to, GuPool* pool)
{
	PgfCohortsState* state = gu_container(self, PgfCohortsState, en);

	// Keep trying start positions until some cohort is found or the
	// heap of pending spots is exhausted.
	while (gu_buf_length(state->found) == 0 &&
	       gu_buf_length(state->spots) > 0) {
		PgfCohortSpot spot;
		gu_buf_heap_pop(state->spots, pgf_cohort_spot_order, &spot);

		// a cohort starting here was already reported
		if (spot.ptr == state->current)
			continue;

		// end of the sentence
		if (*spot.ptr == 0)
			break;

		pgf_lookup_cohorts_helper
		             (state, &spot,
		              0, gu_seq_length(state->concr->sequences)-1,
		              1, (state->sentence+state->len)-spot.ptr);

		if (gu_buf_length(state->found) == 0) {
			// skip one character and try again
			gu_utf8_decode((const uint8_t**) &spot.ptr);
			spot.pos++;
			gu_buf_heap_push(state->spots, pgf_cohort_spot_order, &spot);
		}
	}

	PgfCohortRange* pRes = (PgfCohortRange*)to;

	if (gu_buf_length(state->found) == 0) {
		// Nothing left: write the NULL sentinel range.
		pRes->start.pos = 0;
		pRes->start.ptr = NULL;
		pRes->end.pos = 0;
		pRes->end.ptr = NULL;
		pRes->buf = NULL;
		state->current = NULL;
		return;
	} else do {
		// Report every found range that ends at the same position;
		// the last one popped remains in *pRes for the caller.
		*pRes = gu_buf_pop(state->found, PgfCohortRange);
		state->current = pRes->start.ptr;
		pgf_morpho_iter(pRes->buf, state->callback, state->err);
	} while (gu_buf_length(state->found) > 0 &&
	         gu_buf_index_last(state->found, PgfCohortRange)->end.ptr == pRes->end.ptr);

}
|
||||||
|
|
||||||
|
// Creates an enumerator over the cohorts (word-like segments with
// morphological analyses) of `sentence` in the concrete grammar `concr`.
// Analyses are reported through `callback`; the enumerator yields
// PgfCohortRange values and is allocated in `pool`. Returns NULL after
// raising PgfExn when the concrete syntax is not loaded.
PGF_API GuEnum*
pgf_lookup_cohorts(PgfConcr *concr, GuString sentence,
                   PgfMorphoCallback* callback,
                   GuPool* pool, GuExn* err)
{
	if (concr->sequences == NULL) {
		GuExnData* err_data = gu_raise(err, PgfExn);
		if (err_data) {
			err_data->data = "The concrete syntax is not loaded";
		}
		// Bug fix: always abort here. The original returned only when
		// gu_raise handed back a data slot, otherwise it fell through
		// and later dereferenced the NULL sequence table.
		return NULL;
	}

	PgfCohortsState* state = gu_new(PgfCohortsState, pool);
	state->en.next = pgf_lookup_cohorts_enum_next;
	state->concr = concr;
	state->sentence= sentence;
	state->len = strlen(sentence);
	state->callback= callback;
	state->err = err;
	state->case_sensitive = pgf_is_case_sensitive(concr);
	state->spots = gu_new_buf(PgfCohortSpot, pool);
	state->found = gu_new_buf(PgfCohortRange, pool);

	// Seed the heap with the first non-space position in the sentence.
	PgfCohortSpot spot = {0,sentence};
	while (*spot.ptr != 0) {
		if (!skip_space(&spot.ptr, &spot.pos))
			break;
	}

	gu_buf_heap_push(state->spots, pgf_cohort_spot_order, &spot);

	return &state->en;
}
|
||||||
|
|
||||||
|
// State for the full-form lexicon enumerators
// (pgf_fullform_lexicon / pgf_lookup_word_prefix).
typedef struct {
	GuEnum en;             // embedded enumerator; en.next drives iteration
	PgfSequences* sequences; // the concrete grammar's sorted sequence table
	GuString prefix;       // only sequences starting with this prefix are yielded
	size_t seq_idx;        // index of the next sequence to inspect
	bool case_sensitive;   // prefix matching mode
} PgfFullFormState;
|
||||||
|
|
||||||
|
// One entry of the full-form lexicon: a surface string plus the
// production index holding its morphological analyses.
struct PgfFullFormEntry {
	GuString tokens;       // surface form, see pgf_fullform_get_string()
	PgfProductionIdx* idx; // analyses, iterated by pgf_fullform_get_analyses()
};
|
||||||
|
|
||||||
|
static void
|
||||||
|
gu_fullform_enum_next(GuEnum* self, void* to, GuPool* pool)
|
||||||
|
{
|
||||||
|
PgfFullFormState* st = gu_container(self, PgfFullFormState, en);
|
||||||
|
PgfFullFormEntry* entry = NULL;
|
||||||
|
|
||||||
|
if (st->sequences != NULL) {
|
||||||
|
size_t n_seqs = gu_seq_length(st->sequences);
|
||||||
|
while (st->seq_idx < n_seqs) {
|
||||||
|
PgfSequence* seq = gu_seq_index(st->sequences, PgfSequence, st->seq_idx);
|
||||||
|
GuString tokens = pgf_get_tokens(seq->syms, 0, pool);
|
||||||
|
|
||||||
|
PgfCohortSpot spot = {0, st->prefix};
|
||||||
|
if (cmp_string(&spot, tokens, st->case_sensitive) > 0 || *spot.ptr != 0) {
|
||||||
|
st->seq_idx = n_seqs;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (*tokens != 0 && seq->idx != NULL) {
|
||||||
|
entry = gu_new(PgfFullFormEntry, pool);
|
||||||
|
entry->tokens = tokens;
|
||||||
|
entry->idx = seq->idx;
|
||||||
|
|
||||||
|
st->seq_idx++;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
st->seq_idx++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
*((PgfFullFormEntry**) to) = entry;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Returns an enumerator over the entire full-form lexicon of `concr`:
// every sequence is visited (empty prefix), matched case-sensitively.
// The enumerator yields PgfFullFormEntry* values allocated in `pool`.
PGF_API GuEnum*
pgf_fullform_lexicon(PgfConcr *concr, GuPool* pool)
{
	PgfFullFormState* state = gu_new(PgfFullFormState, pool);
	state->en.next        = gu_fullform_enum_next;
	state->sequences      = concr->sequences;
	state->prefix         = "";
	state->seq_idx        = 0;
	state->case_sensitive = true;
	return &state->en;
}
|
||||||
|
|
||||||
|
// Returns the surface string (the tokens) of a full-form entry.
// The string is owned by the pool the entry was allocated from.
PGF_API GuString
pgf_fullform_get_string(PgfFullFormEntry* entry)
{
	return entry->tokens;
}
|
||||||
|
|
||||||
|
// Reports every morphological analysis stored in the entry's
// production index through `callback`; `err` carries any exception
// raised by the callback.
PGF_API void
pgf_fullform_get_analyses(PgfFullFormEntry* entry,
                          PgfMorphoCallback* callback, GuExn* err)
{
	pgf_morpho_iter(entry->idx, callback, err);
}
|
||||||
|
|
||||||
|
// Returns an enumerator over all full-form entries whose tokens start
// with `prefix`. Positions seq_idx at the first matching sequence via
// binary search; the enumerator (gu_fullform_enum_next) then scrolls
// forward. Returns NULL after raising PgfExn when the concrete syntax
// is not loaded.
PGF_API GuEnum*
pgf_lookup_word_prefix(PgfConcr *concr, GuString prefix,
                       GuPool* pool, GuExn* err)
{
	if (concr->sequences == NULL) {
		GuExnData* err_data = gu_raise(err, PgfExn);
		if (err_data) {
			err_data->data = "The concrete syntax is not loaded";
		}
		// Bug fix: always abort here. The original returned only when
		// gu_raise handed back a data slot, otherwise it fell through
		// and ran the binary search on the NULL sequence table.
		return NULL;
	}

	PgfFullFormState* state = gu_new(PgfFullFormState, pool);
	state->en.next = gu_fullform_enum_next;
	state->sequences = concr->sequences;
	state->prefix = prefix;
	state->seq_idx = 0;
	state->case_sensitive = pgf_is_case_sensitive(concr);

	PgfSequenceOrder order = { { pgf_sequence_cmp_fn },
	                           state->case_sensitive };
	if (!gu_seq_binsearch_index(concr->sequences, &order.order,
	                            PgfSequence, (void*) prefix,
	                            &state->seq_idx)) {
		// No exact hit: binsearch left the insertion point; step past it.
		state->seq_idx++;
	} else if (!state->case_sensitive) {
		/* If the match is case-insensitive then there might be more
		 * matches around the current index. Since we scroll down
		 * anyway, it is enough to search upwards now.
		 */

		while (state->seq_idx > 0) {
			PgfSequence* seq =
				gu_seq_index(concr->sequences, PgfSequence, state->seq_idx-1);

			size_t sym_idx = 0;
			PgfCohortSpot spot = {0, state->prefix};
			if (pgf_symbols_cmp(&spot, seq->syms, &sym_idx, state->case_sensitive) > 0 || *spot.ptr != 0) {
				break;
			}

			state->seq_idx--;
		}
	}

	return &state->en;
}
|
||||||
@@ -499,14 +499,17 @@ store_expr(SgSG* sg,
|
|||||||
PgfExprLit* elit = ei.data;
|
PgfExprLit* elit = ei.data;
|
||||||
|
|
||||||
Mem mem[2];
|
Mem mem[2];
|
||||||
|
size_t len = 0;
|
||||||
|
|
||||||
GuVariantInfo li = gu_variant_open(elit->lit);
|
GuVariantInfo li = gu_variant_open(elit->lit);
|
||||||
switch (li.tag) {
|
switch (li.tag) {
|
||||||
case PGF_LITERAL_STR: {
|
case PGF_LITERAL_STR: {
|
||||||
PgfLiteralStr* lstr = li.data;
|
PgfLiteralStr* lstr = li.data;
|
||||||
|
|
||||||
|
len = strlen(lstr->val);
|
||||||
|
|
||||||
mem[0].flags = MEM_Str;
|
mem[0].flags = MEM_Str;
|
||||||
mem[0].n = strlen(lstr->val);
|
mem[0].n = len;
|
||||||
mem[0].z = lstr->val;
|
mem[0].z = lstr->val;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -515,6 +518,7 @@ store_expr(SgSG* sg,
|
|||||||
|
|
||||||
mem[0].flags = MEM_Int;
|
mem[0].flags = MEM_Int;
|
||||||
mem[0].u.i = lint->val;
|
mem[0].u.i = lint->val;
|
||||||
|
len = sizeof(mem[0].u.i);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case PGF_LITERAL_FLT: {
|
case PGF_LITERAL_FLT: {
|
||||||
@@ -522,6 +526,7 @@ store_expr(SgSG* sg,
|
|||||||
|
|
||||||
mem[0].flags = MEM_Real;
|
mem[0].flags = MEM_Real;
|
||||||
mem[0].u.r = lflt->val;
|
mem[0].u.r = lflt->val;
|
||||||
|
len = sizeof(mem[0].u.r);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
@@ -556,7 +561,7 @@ store_expr(SgSG* sg,
|
|||||||
int serial_type_arg = sqlite3BtreeSerialType(&mem[1], file_format);
|
int serial_type_arg = sqlite3BtreeSerialType(&mem[1], file_format);
|
||||||
int serial_type_arg_hdr_len = sqlite3BtreeVarintLen(serial_type_arg);
|
int serial_type_arg_hdr_len = sqlite3BtreeVarintLen(serial_type_arg);
|
||||||
|
|
||||||
unsigned char* buf = malloc(1+serial_type_lit_hdr_len+(serial_type_arg_hdr_len > 1 ? serial_type_arg_hdr_len : 1)+mem[0].n+8);
|
unsigned char* buf = malloc(1+serial_type_lit_hdr_len+(serial_type_arg_hdr_len > 1 ? serial_type_arg_hdr_len : 1)+len+8);
|
||||||
unsigned char* p = buf;
|
unsigned char* p = buf;
|
||||||
*p++ = 1+serial_type_lit_hdr_len+serial_type_arg_hdr_len;
|
*p++ = 1+serial_type_lit_hdr_len+serial_type_arg_hdr_len;
|
||||||
p += putVarint32(p, serial_type_lit);
|
p += putVarint32(p, serial_type_lit);
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user