mirror of
https://github.com/GrammaticalFramework/gf-core.git
synced 2026-04-09 04:59:31 -06:00
Compare commits
130 Commits
wasm
...
release3.1
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
17ebcac84f | ||
|
|
7d018dde62 | ||
|
|
4dba12c0ce | ||
|
|
5ca230dd2a | ||
|
|
242cdcfa22 | ||
|
|
052916b454 | ||
|
|
d07646e753 | ||
|
|
3b69a28dbd | ||
|
|
aa004246d2 | ||
|
|
7c6f53d003 | ||
|
|
a6d5d9a50c | ||
|
|
7792c3cc90 | ||
|
|
a7d73a6861 | ||
|
|
646cfbea0c | ||
|
|
7ddb61eb48 | ||
|
|
dcae5f929e | ||
|
|
638ed39fa4 | ||
|
|
726fb3467c | ||
|
|
b02bb08532 | ||
|
|
c7e26d7cd2 | ||
|
|
4fea7cf37f | ||
|
|
9e5701b13c | ||
|
|
78beac7598 | ||
|
|
f96830f7de | ||
|
|
1c4cde7c66 | ||
|
|
e0ad7594dd | ||
|
|
a218903a2d | ||
|
|
f1c1d157b6 | ||
|
|
e7c0b6dada | ||
|
|
8f4e8c73d2 | ||
|
|
d983255326 | ||
|
|
288984d243 | ||
|
|
c23a03a2d1 | ||
|
|
183e421a0f | ||
|
|
3e0c0fa463 | ||
|
|
c2431e06b2 | ||
|
|
eeab15bee1 | ||
|
|
b36b95c4d6 | ||
|
|
2627e73b63 | ||
|
|
e2ff43da0b | ||
|
|
af09351b66 | ||
|
|
8c89ba4e76 | ||
|
|
218c61b004 | ||
|
|
52df0ed4fe | ||
|
|
2324fe795c | ||
|
|
703b1e5d92 | ||
|
|
f1a72a066f | ||
|
|
6f9f9642d7 | ||
|
|
f5752b345a | ||
|
|
5170668ff2 | ||
|
|
65e85c5a3c | ||
|
|
01c4f82e07 | ||
|
|
e81d668605 | ||
|
|
155b9da861 | ||
|
|
ab0f09e9f7 | ||
|
|
9fa8ac934a | ||
|
|
e84826ed2a | ||
|
|
bbf12458c7 | ||
|
|
b914a25de3 | ||
|
|
1037b209ae | ||
|
|
981d6b9bdd | ||
|
|
5776b567a2 | ||
|
|
643617ccc4 | ||
|
|
41f45e572b | ||
|
|
c7226cc11c | ||
|
|
bc56b54dd1 | ||
|
|
aa061aff0c | ||
|
|
934afc9655 | ||
|
|
33b0bab610 | ||
|
|
9492967fc6 | ||
|
|
5eab0a626d | ||
|
|
fc614cd48e | ||
|
|
eaec428a89 | ||
|
|
ed0a8ca0df | ||
|
|
c65dc70aaf | ||
|
|
2a654c085f | ||
|
|
b855a094f8 | ||
|
|
2f31bbab23 | ||
|
|
7e707508a7 | ||
|
|
c2182274df | ||
|
|
e11017abc0 | ||
|
|
b59fe24c11 | ||
|
|
9204884463 | ||
|
|
2c98075a0b | ||
|
|
7d9015e2e1 | ||
|
|
cf1ef40789 | ||
|
|
37f06a4ae8 | ||
|
|
30c1376232 | ||
|
|
ea3cef46b0 | ||
|
|
268a25f59c | ||
|
|
318b710a14 | ||
|
|
b90666455e | ||
|
|
88db715c3d | ||
|
|
003ab57576 | ||
|
|
ffd7b27abd | ||
|
|
096b36c21d | ||
|
|
86af7b12b3 | ||
|
|
e2c2763d59 | ||
|
|
fae2fc4c6c | ||
|
|
5131fadd1f | ||
|
|
0e1cbfaa7e | ||
|
|
95e5976b03 | ||
|
|
9dee033e2c | ||
|
|
83a4a0525e | ||
|
|
f58697f31f | ||
|
|
8f6dc916b6 | ||
|
|
6a36b486fa | ||
|
|
8190d9fe49 | ||
|
|
527a4451d3 | ||
|
|
2c13f529f9 | ||
|
|
8b82f1ab33 | ||
|
|
7bcc70e79d | ||
|
|
85038d0175 | ||
|
|
6edd449d68 | ||
|
|
a58c6d49d4 | ||
|
|
fef7b80d8e | ||
|
|
03df25bb7a | ||
|
|
3122590e35 | ||
|
|
0a16b76875 | ||
|
|
51b7117a3d | ||
|
|
fef03e755b | ||
|
|
223f92d4f6 | ||
|
|
83483b93ba | ||
|
|
dc8dce90a0 | ||
|
|
e9bbd38f68 | ||
|
|
3fac8415ca | ||
|
|
1294269cd6 | ||
|
|
3acb7d2da4 | ||
|
|
08fb29e6b8 | ||
|
|
f69babef6d |
23
.github/workflows/build-all-versions.yml
vendored
23
.github/workflows/build-all-versions.yml
vendored
@@ -12,6 +12,7 @@ jobs:
|
||||
name: ${{ matrix.os }} / ghc ${{ matrix.ghc }}
|
||||
runs-on: ${{ matrix.os }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
cabal: ["latest"]
|
||||
@@ -19,21 +20,26 @@ jobs:
|
||||
- "8.6.5"
|
||||
- "8.8.3"
|
||||
- "8.10.7"
|
||||
- "9.6.7"
|
||||
exclude:
|
||||
- os: macos-latest
|
||||
ghc: 8.8.3
|
||||
- os: macos-latest
|
||||
ghc: 8.6.5
|
||||
- os: macos-latest
|
||||
ghc: 8.10.7
|
||||
- os: windows-latest
|
||||
ghc: 8.8.3
|
||||
- os: windows-latest
|
||||
ghc: 8.6.5
|
||||
- os: windows-latest
|
||||
ghc: 8.10.7
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
if: github.event.action == 'opened' || github.event.action == 'synchronize' || github.event.ref == 'refs/heads/master'
|
||||
|
||||
- uses: haskell/actions/setup@v1.2.9
|
||||
- uses: haskell-actions/setup@v2
|
||||
id: setup-haskell-cabal
|
||||
name: Setup Haskell
|
||||
with:
|
||||
@@ -44,7 +50,7 @@ jobs:
|
||||
run: |
|
||||
cabal freeze
|
||||
|
||||
- uses: actions/cache@v1
|
||||
- uses: actions/cache@v4
|
||||
name: Cache ~/.cabal/store
|
||||
with:
|
||||
path: ${{ steps.setup-haskell-cabal.outputs.cabal-store }}
|
||||
@@ -62,18 +68,18 @@ jobs:
|
||||
|
||||
stack:
|
||||
name: stack / ghc ${{ matrix.ghc }}
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ${{ matrix.ghc == '7.10.3' && 'ubuntu-20.04' || 'ubuntu-latest' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
stack: ["latest"]
|
||||
ghc: ["7.10.3","8.0.2", "8.2.2", "8.4.4", "8.6.5", "8.8.4", "8.10.7", "9.0.2"]
|
||||
# ghc: ["8.8.3"]
|
||||
ghc: ["8.4.4", "8.6.5", "8.8.4", "8.10.7", "9.0.2", "9.6.7"]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
if: github.event.action == 'opened' || github.event.action == 'synchronize' || github.event.ref == 'refs/heads/master'
|
||||
|
||||
- uses: haskell/actions/setup@v1.2.9
|
||||
- uses: haskell-actions/setup@v2
|
||||
name: Setup Haskell Stack
|
||||
with:
|
||||
ghc-version: ${{ matrix.ghc }}
|
||||
@@ -85,7 +91,7 @@ jobs:
|
||||
- run: sed -i.bak 's/"C compiler link flags", "/&-no-pie /' /home/runner/.ghcup/ghc/7.10.3/lib/ghc-7.10.3/settings
|
||||
if: matrix.ghc == '7.10.3'
|
||||
|
||||
- uses: actions/cache@v1
|
||||
- uses: actions/cache@v4
|
||||
name: Cache ~/.stack
|
||||
with:
|
||||
path: ~/.stack
|
||||
@@ -95,8 +101,7 @@ jobs:
|
||||
|
||||
- name: Build
|
||||
run: |
|
||||
stack build --system-ghc --stack-yaml stack-ghc${{ matrix.ghc }}.yaml
|
||||
# stack build --system-ghc --test --bench --no-run-tests --no-run-benchmarks
|
||||
stack build --test --no-run-tests --system-ghc --stack-yaml stack-ghc${{ matrix.ghc }}.yaml
|
||||
|
||||
- name: Test
|
||||
run: |
|
||||
|
||||
122
.github/workflows/build-binary-packages.yml
vendored
122
.github/workflows/build-binary-packages.yml
vendored
@@ -2,7 +2,7 @@ name: Build Binary Packages
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
release:
|
||||
release:
|
||||
types: ["created"]
|
||||
|
||||
jobs:
|
||||
@@ -13,9 +13,9 @@ jobs:
|
||||
name: Build Ubuntu package
|
||||
strategy:
|
||||
matrix:
|
||||
os:
|
||||
- ubuntu-18.04
|
||||
- ubuntu-20.04
|
||||
ghc: ["9.6"]
|
||||
cabal: ["3.10"]
|
||||
os: ["ubuntu-24.04"]
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
@@ -25,12 +25,13 @@ jobs:
|
||||
# Note: `haskell-platform` is listed as requirement in debian/control,
|
||||
# which is why it's installed using apt instead of the Setup Haskell action.
|
||||
|
||||
# - name: Setup Haskell
|
||||
# uses: actions/setup-haskell@v1
|
||||
# id: setup-haskell-cabal
|
||||
# with:
|
||||
# ghc-version: ${{ matrix.ghc }}
|
||||
# cabal-version: ${{ matrix.cabal }}
|
||||
- name: Setup Haskell
|
||||
uses: haskell-actions/setup@v2
|
||||
id: setup-haskell-cabal
|
||||
with:
|
||||
ghc-version: ${{ matrix.ghc }}
|
||||
cabal-version: ${{ matrix.cabal }}
|
||||
if: matrix.os == 'ubuntu-24.04'
|
||||
|
||||
- name: Install build tools
|
||||
run: |
|
||||
@@ -39,14 +40,15 @@ jobs:
|
||||
make \
|
||||
dpkg-dev \
|
||||
debhelper \
|
||||
haskell-platform \
|
||||
libghc-json-dev \
|
||||
python-dev \
|
||||
default-jdk \
|
||||
libtool-bin
|
||||
|
||||
python-dev-is-python3 \
|
||||
libtool-bin
|
||||
cabal install alex happy
|
||||
|
||||
- name: Build package
|
||||
run: |
|
||||
export PYTHONPATH="/home/runner/work/gf-core/gf-core/debian/gf/usr/local/lib/python3.12/dist-packages/"
|
||||
make deb
|
||||
|
||||
- name: Copy package
|
||||
@@ -54,7 +56,7 @@ jobs:
|
||||
cp ../gf_*.deb dist/
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: gf-${{ github.event.release.tag_name }}-${{ matrix.os }}.deb
|
||||
path: dist/gf_*.deb
|
||||
@@ -64,14 +66,14 @@ jobs:
|
||||
run: |
|
||||
mv dist/gf_*.deb dist/gf-${{ github.event.release.tag_name }}-${{ matrix.os }}.deb
|
||||
|
||||
- uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }}
|
||||
asset_path: dist/gf-${{ github.event.release.tag_name }}-${{ matrix.os }}.deb
|
||||
asset_name: gf-${{ github.event.release.tag_name }}-${{ matrix.os }}.deb
|
||||
asset_content_type: application/octet-stream
|
||||
#- uses: actions/upload-release-asset@v1.0.2
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# with:
|
||||
# upload_url: ${{ github.event.release.upload_url }}
|
||||
# asset_path: dist/gf-${{ github.event.release.tag_name }}-${{ matrix.os }}.deb
|
||||
# asset_name: gf-${{ github.event.release.tag_name }}-${{ matrix.os }}.deb
|
||||
# asset_content_type: application/octet-stream
|
||||
|
||||
# ---
|
||||
|
||||
@@ -79,16 +81,16 @@ jobs:
|
||||
name: Build macOS package
|
||||
strategy:
|
||||
matrix:
|
||||
ghc: ["8.6.5"]
|
||||
cabal: ["2.4"]
|
||||
os: ["macos-10.15"]
|
||||
ghc: ["9.6"]
|
||||
cabal: ["3.10"]
|
||||
os: ["macos-latest", "macos-13"]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
|
||||
- name: Setup Haskell
|
||||
uses: actions/setup-haskell@v1
|
||||
uses: haskell-actions/setup@v2
|
||||
id: setup-haskell-cabal
|
||||
with:
|
||||
ghc-version: ${{ matrix.ghc }}
|
||||
@@ -97,8 +99,10 @@ jobs:
|
||||
- name: Install build tools
|
||||
run: |
|
||||
brew install \
|
||||
automake
|
||||
automake \
|
||||
libtool
|
||||
cabal v1-install alex happy
|
||||
pip install setuptools
|
||||
|
||||
- name: Build package
|
||||
run: |
|
||||
@@ -107,24 +111,24 @@ jobs:
|
||||
make pkg
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: gf-${{ github.event.release.tag_name }}-macos
|
||||
name: gf-${{ github.event.release.tag_name }}-${{ matrix.os }}
|
||||
path: dist/gf-*.pkg
|
||||
if-no-files-found: error
|
||||
|
||||
|
||||
- name: Rename package
|
||||
run: |
|
||||
mv dist/gf-*.pkg dist/gf-${{ github.event.release.tag_name }}-macos.pkg
|
||||
|
||||
- uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }}
|
||||
asset_path: dist/gf-${{ github.event.release.tag_name }}-macos.pkg
|
||||
asset_name: gf-${{ github.event.release.tag_name }}-macos.pkg
|
||||
asset_content_type: application/octet-stream
|
||||
#- uses: actions/upload-release-asset@v1.0.2
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# with:
|
||||
# upload_url: ${{ github.event.release.upload_url }}
|
||||
# asset_path: dist/gf-${{ github.event.release.tag_name }}-macos.pkg
|
||||
# asset_name: gf-${{ github.event.release.tag_name }}-macos.pkg
|
||||
# asset_content_type: application/octet-stream
|
||||
|
||||
# ---
|
||||
|
||||
@@ -132,9 +136,9 @@ jobs:
|
||||
name: Build Windows package
|
||||
strategy:
|
||||
matrix:
|
||||
ghc: ["8.6.5"]
|
||||
cabal: ["2.4"]
|
||||
os: ["windows-2019"]
|
||||
ghc: ["9.6.7"]
|
||||
cabal: ["3.10"]
|
||||
os: ["windows-2022"]
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
@@ -147,6 +151,7 @@ jobs:
|
||||
base-devel
|
||||
gcc
|
||||
python-devel
|
||||
autotools
|
||||
|
||||
- name: Prepare dist folder
|
||||
shell: msys2 {0}
|
||||
@@ -171,7 +176,8 @@ jobs:
|
||||
- name: Build Java bindings
|
||||
shell: msys2 {0}
|
||||
run: |
|
||||
export JDKPATH=/c/hostedtoolcache/windows/Java_Adopt_jdk/8.0.292-10/x64
|
||||
echo $JAVA_HOME_8_X64
|
||||
export JDKPATH="$(cygpath -u "${JAVA_HOME_8_X64}")"
|
||||
export PATH="${PATH}:${JDKPATH}/bin"
|
||||
cd src/runtime/java
|
||||
make \
|
||||
@@ -180,6 +186,9 @@ jobs:
|
||||
make install
|
||||
cp .libs/msys-jpgf-0.dll /c/tmp-dist/java/jpgf.dll
|
||||
cp jpgf.jar /c/tmp-dist/java
|
||||
if: false
|
||||
|
||||
# - uses: actions/setup-python@v5
|
||||
|
||||
- name: Build Python bindings
|
||||
shell: msys2 {0}
|
||||
@@ -188,12 +197,13 @@ jobs:
|
||||
EXTRA_LIB_DIRS: /mingw64/lib
|
||||
run: |
|
||||
cd src/runtime/python
|
||||
pacman --noconfirm -S python-setuptools
|
||||
python setup.py build
|
||||
python setup.py install
|
||||
cp /usr/lib/python3.9/site-packages/pgf* /c/tmp-dist/python
|
||||
cp -r /usr/lib/python3.12/site-packages/pgf* /c/tmp-dist/python
|
||||
|
||||
- name: Setup Haskell
|
||||
uses: actions/setup-haskell@v1
|
||||
uses: haskell-actions/setup@v2
|
||||
id: setup-haskell-cabal
|
||||
with:
|
||||
ghc-version: ${{ matrix.ghc }}
|
||||
@@ -205,13 +215,13 @@ jobs:
|
||||
|
||||
- name: Build GF
|
||||
run: |
|
||||
cabal install --only-dependencies -fserver
|
||||
cabal install -fserver --only-dependencies
|
||||
cabal configure -fserver
|
||||
cabal build
|
||||
copy dist\build\gf\gf.exe C:\tmp-dist
|
||||
copy dist-newstyle/build/x86_64-windows/ghc-${{matrix.ghc}}/*/x/gf/build/gf/gf.exe C:/tmp-dist
|
||||
|
||||
- name: Upload artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: gf-${{ github.event.release.tag_name }}-windows
|
||||
path: C:\tmp-dist\*
|
||||
@@ -220,11 +230,11 @@ jobs:
|
||||
- name: Create archive
|
||||
run: |
|
||||
Compress-Archive C:\tmp-dist C:\gf-${{ github.event.release.tag_name }}-windows.zip
|
||||
- uses: actions/upload-release-asset@v1.0.2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }}
|
||||
asset_path: C:\gf-${{ github.event.release.tag_name }}-windows.zip
|
||||
asset_name: gf-${{ github.event.release.tag_name }}-windows.zip
|
||||
asset_content_type: application/zip
|
||||
#- uses: actions/upload-release-asset@v1.0.2
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
# with:
|
||||
# upload_url: ${{ github.event.release.upload_url }}
|
||||
# asset_path: C:\gf-${{ github.event.release.tag_name }}-windows.zip
|
||||
# asset_name: gf-${{ github.event.release.tag_name }}-windows.zip
|
||||
# asset_content_type: application/zip
|
||||
|
||||
38
.github/workflows/build-python-package.yml
vendored
38
.github/workflows/build-python-package.yml
vendored
@@ -13,24 +13,25 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
os: [ubuntu-18.04, macos-10.15]
|
||||
os: [ubuntu-latest, macos-latest, macos-13]
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v1
|
||||
- uses: actions/setup-python@v5
|
||||
name: Install Python
|
||||
with:
|
||||
python-version: '3.7'
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install cibuildwheel
|
||||
run: |
|
||||
python -m pip install git+https://github.com/joerick/cibuildwheel.git@main
|
||||
python -m pip install cibuildwheel
|
||||
|
||||
- name: Install build tools for OSX
|
||||
if: startsWith(matrix.os, 'macos')
|
||||
run: |
|
||||
brew install automake
|
||||
brew install libtool
|
||||
|
||||
- name: Build wheels on Linux
|
||||
if: startsWith(matrix.os, 'macos') != true
|
||||
@@ -42,30 +43,32 @@ jobs:
|
||||
- name: Build wheels on OSX
|
||||
if: startsWith(matrix.os, 'macos')
|
||||
env:
|
||||
CIBW_BEFORE_BUILD: cd src/runtime/c && glibtoolize && autoreconf -i && ./configure && make && make install
|
||||
CIBW_BEFORE_BUILD: cd src/runtime/c && glibtoolize && autoreconf -i && ./configure && make && sudo make install
|
||||
run: |
|
||||
python -m cibuildwheel src/runtime/python --output-dir wheelhouse
|
||||
|
||||
- uses: actions/upload-artifact@v2
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheel-${{ matrix.os }}
|
||||
path: ./wheelhouse
|
||||
|
||||
build_sdist:
|
||||
name: Build source distribution
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: actions/setup-python@v5
|
||||
name: Install Python
|
||||
with:
|
||||
python-version: '3.7'
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Build sdist
|
||||
run: cd src/runtime/python && python setup.py sdist
|
||||
|
||||
- uses: actions/upload-artifact@v2
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: wheel-source
|
||||
path: ./src/runtime/python/dist/*.tar.gz
|
||||
|
||||
upload_pypi:
|
||||
@@ -75,24 +78,25 @@ jobs:
|
||||
if: github.ref == 'refs/heads/master' && github.event_name == 'push'
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.x'
|
||||
|
||||
- name: Install twine
|
||||
run: pip install twine
|
||||
|
||||
- uses: actions/download-artifact@v2
|
||||
- uses: actions/download-artifact@v4.1.7
|
||||
with:
|
||||
name: artifact
|
||||
pattern: wheel-*
|
||||
merge-multiple: true
|
||||
path: ./dist
|
||||
|
||||
- name: Publish
|
||||
env:
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.pypi_password }}
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
|
||||
run: |
|
||||
(cd ./src/runtime/python && curl -I --fail https://pypi.org/project/$(python setup.py --name)/$(python setup.py --version)/) || twine upload dist/*
|
||||
twine upload --verbose --non-interactive --skip-existing dist/*
|
||||
6
.gitignore
vendored
6
.gitignore
vendored
@@ -73,3 +73,9 @@ doc/icfp-2012.html
|
||||
download/*.html
|
||||
gf-book/index.html
|
||||
src/www/gf-web-api.html
|
||||
.devenv
|
||||
.direnv
|
||||
result
|
||||
.vscode
|
||||
.envrc
|
||||
.pre-commit-config.yaml
|
||||
@@ -1,6 +1,7 @@
|
||||
### New since 3.11 (WIP)
|
||||
### New since 3.12 (WIP)
|
||||
|
||||
- Added a changelog!
|
||||
### 3.12
|
||||
See <https://www.grammaticalframework.org/download/release-3.12.html>
|
||||
|
||||
### 3.11
|
||||
|
||||
|
||||
2
Makefile
2
Makefile
@@ -50,7 +50,7 @@ html::
|
||||
# number to the top of debian/changelog.
|
||||
# (Tested on Ubuntu 15.04. You need to install dpkg-dev & debhelper.)
|
||||
deb:
|
||||
dpkg-buildpackage -b -uc
|
||||
dpkg-buildpackage -b -uc -d
|
||||
|
||||
# Make a macOS installer package
|
||||
pkg:
|
||||
|
||||
15
README.md
15
README.md
@@ -38,6 +38,21 @@ or:
|
||||
```
|
||||
stack install
|
||||
```
|
||||
Note that if you are unlucky to have Cabal 3.0 or later, then it uses
|
||||
the so-called Nix style commands. Using those for GF development is
|
||||
a pain. Every time when you change something in the source code, Cabal
|
||||
will generate a new folder for GF to look for the GF libraries and
|
||||
the GF cloud. Either reinstall everything with every change in the
|
||||
compiler, or be sane and stop using cabal-install. Instead you can do:
|
||||
```
|
||||
runghc Setup.hs configure
|
||||
runghc Setup.hs build
|
||||
sudo runghc Setup.hs install
|
||||
```
|
||||
The script will install the GF dependencies globally. The only solution
|
||||
to the Nix madness that I found is radical:
|
||||
|
||||
"No person, no problem" (Нет человека – нет проблемы).
|
||||
|
||||
For more information, including links to precompiled binaries, see the [download page](https://www.grammaticalframework.org/download/index.html).
|
||||
|
||||
|
||||
81
Setup.hs
81
Setup.hs
@@ -4,42 +4,68 @@ import Distribution.Simple.LocalBuildInfo(LocalBuildInfo(..),absoluteInstallDirs
|
||||
import Distribution.Simple.Setup(BuildFlags(..),Flag(..),InstallFlags(..),CopyDest(..),CopyFlags(..),SDistFlags(..))
|
||||
import Distribution.PackageDescription(PackageDescription(..),emptyHookedBuildInfo)
|
||||
import Distribution.Simple.BuildPaths(exeExtension)
|
||||
import System.Directory
|
||||
import System.FilePath((</>),(<.>))
|
||||
import System.Process
|
||||
import Control.Monad(forM_,unless)
|
||||
import Control.Exception(bracket_)
|
||||
import Data.Char(isSpace)
|
||||
|
||||
import WebSetup
|
||||
|
||||
-- | Notice about RGL not built anymore
|
||||
noRGLmsg :: IO ()
|
||||
noRGLmsg = putStrLn "Notice: the RGL is not built as part of GF anymore. See https://github.com/GrammaticalFramework/gf-rgl"
|
||||
|
||||
main :: IO ()
|
||||
main = defaultMainWithHooks simpleUserHooks
|
||||
{ preBuild = gfPreBuild
|
||||
{ preConf = gfPreConf
|
||||
, preBuild = gfPreBuild
|
||||
, postBuild = gfPostBuild
|
||||
, preInst = gfPreInst
|
||||
, postInst = gfPostInst
|
||||
, postCopy = gfPostCopy
|
||||
}
|
||||
where
|
||||
gfPreBuild args = gfPre args . buildDistPref
|
||||
gfPreInst args = gfPre args . installDistPref
|
||||
gfPreConf args flags = do
|
||||
pkgs <- fmap (map (dropWhile isSpace) . tail . lines)
|
||||
(readProcess "ghc-pkg" ["list"] "")
|
||||
forM_ dependencies $ \pkg -> do
|
||||
let name = takeWhile (/='/') (drop 36 pkg)
|
||||
unless (name `elem` pkgs) $ do
|
||||
let fname = name <.> ".tar.gz"
|
||||
callProcess "wget" [pkg,"-O",fname]
|
||||
callProcess "tar" ["-xzf",fname]
|
||||
removeFile fname
|
||||
bracket_ (setCurrentDirectory name) (setCurrentDirectory ".." >> removeDirectoryRecursive name) $ do
|
||||
exists <- doesFileExist "Setup.hs"
|
||||
unless exists $ do
|
||||
writeFile "Setup.hs" (unlines [
|
||||
"import Distribution.Simple",
|
||||
"main = defaultMain"
|
||||
])
|
||||
let to_descr = reverse .
|
||||
(++) (reverse ".cabal") .
|
||||
drop 1 .
|
||||
dropWhile (/='-') .
|
||||
reverse
|
||||
callProcess "wget" [to_descr pkg, "-O", to_descr name]
|
||||
callProcess "runghc" ["Setup.hs","configure"]
|
||||
callProcess "runghc" ["Setup.hs","build"]
|
||||
callProcess "sudo" ["runghc","Setup.hs","install"]
|
||||
|
||||
preConf simpleUserHooks args flags
|
||||
|
||||
gfPreBuild args = gfPre args . buildDistPref
|
||||
gfPreInst args = gfPre args . installDistPref
|
||||
|
||||
gfPre args distFlag = do
|
||||
return emptyHookedBuildInfo
|
||||
|
||||
gfPostBuild args flags pkg lbi = do
|
||||
-- noRGLmsg
|
||||
let gf = default_gf lbi
|
||||
buildWeb gf flags (pkg,lbi)
|
||||
|
||||
gfPostInst args flags pkg lbi = do
|
||||
-- noRGLmsg
|
||||
saveInstallPath args flags (pkg,lbi)
|
||||
installWeb (pkg,lbi)
|
||||
|
||||
gfPostCopy args flags pkg lbi = do
|
||||
-- noRGLmsg
|
||||
saveCopyPath args flags (pkg,lbi)
|
||||
copyWeb flags (pkg,lbi)
|
||||
|
||||
-- `cabal sdist` will not make a proper dist archive, for that see `make sdist`
|
||||
@@ -47,27 +73,16 @@ main = defaultMainWithHooks simpleUserHooks
|
||||
gfSDist pkg lbi hooks flags = do
|
||||
return ()
|
||||
|
||||
saveInstallPath :: [String] -> InstallFlags -> (PackageDescription, LocalBuildInfo) -> IO ()
|
||||
saveInstallPath args flags bi = do
|
||||
let
|
||||
dest = NoCopyDest
|
||||
dir = datadir (uncurry absoluteInstallDirs bi dest)
|
||||
writeFile dataDirFile dir
|
||||
|
||||
saveCopyPath :: [String] -> CopyFlags -> (PackageDescription, LocalBuildInfo) -> IO ()
|
||||
saveCopyPath args flags bi = do
|
||||
let
|
||||
dest = case copyDest flags of
|
||||
NoFlag -> NoCopyDest
|
||||
Flag d -> d
|
||||
dir = datadir (uncurry absoluteInstallDirs bi dest)
|
||||
writeFile dataDirFile dir
|
||||
|
||||
-- | Name of file where installation's data directory is recording
|
||||
-- This is a last-resort way in which the seprate RGL build script
|
||||
-- can determine where to put the compiled RGL files
|
||||
dataDirFile :: String
|
||||
dataDirFile = "DATA_DIR"
|
||||
dependencies = [
|
||||
"https://hackage.haskell.org/package/utf8-string-1.0.2/utf8-string-1.0.2.tar.gz",
|
||||
"https://hackage.haskell.org/package/json-0.10/json-0.10.tar.gz",
|
||||
"https://hackage.haskell.org/package/network-bsd-2.8.1.0/network-bsd-2.8.1.0.tar.gz",
|
||||
"https://hackage.haskell.org/package/httpd-shed-0.4.1.1/httpd-shed-0.4.1.1.tar.gz",
|
||||
"https://hackage.haskell.org/package/exceptions-0.10.5/exceptions-0.10.5.tar.gz",
|
||||
"https://hackage.haskell.org/package/stringsearch-0.3.6.6/stringsearch-0.3.6.6.tar.gz",
|
||||
"https://hackage.haskell.org/package/multipart-0.2.1/multipart-0.2.1.tar.gz",
|
||||
"https://hackage.haskell.org/package/cgi-3001.5.0.0/cgi-3001.5.0.0.tar.gz"
|
||||
]
|
||||
|
||||
-- | Get path to locally-built gf
|
||||
default_gf :: LocalBuildInfo -> FilePath
|
||||
|
||||
@@ -32,7 +32,7 @@ set -x # print commands before executing them
|
||||
pushd src/runtime/c
|
||||
bash setup.sh configure --prefix="$prefix"
|
||||
bash setup.sh build
|
||||
bash setup.sh install prefix="$prefix" # hack required for GF build on macOS
|
||||
# bash setup.sh install prefix="$prefix" # hack required for GF build on macOS
|
||||
bash setup.sh install prefix="$destdir$prefix"
|
||||
popd
|
||||
|
||||
@@ -46,7 +46,7 @@ if which >/dev/null python; then
|
||||
pyver=$(ls "$destdir$prefix/lib" | sed -n 's/^python//p')
|
||||
pydest="$destdir/Library/Python/$pyver/site-packages"
|
||||
mkdir -p "$pydest"
|
||||
ln "$destdir$prefix/lib/python$pyver/site-packages"/pgf* "$pydest"
|
||||
ln "$destdir$prefix/lib/python$pyver/site-packages"/pgf*.so "$pydest"
|
||||
fi
|
||||
popd
|
||||
else
|
||||
|
||||
5
debian/changelog
vendored
5
debian/changelog
vendored
@@ -1,3 +1,8 @@
|
||||
gf (3.12) noble; urgency=low
|
||||
|
||||
* GF 3.12
|
||||
|
||||
-- Inari Listenmaa <inari@digitalgrammars.com> Fri, 8 Aug 2025 18:29:29 +0100
|
||||
gf (3.11) bionic focal; urgency=low
|
||||
|
||||
* GF 3.11
|
||||
|
||||
2
debian/control
vendored
2
debian/control
vendored
@@ -3,7 +3,7 @@ Section: devel
|
||||
Priority: optional
|
||||
Maintainer: Thomas Hallgren <hallgren@chalmers.se>
|
||||
Standards-Version: 3.9.2
|
||||
Build-Depends: debhelper (>= 5), haskell-platform (>= 2011.2.0.1), libghc-haskeline-dev, libghc-mtl-dev, libghc-json-dev, autoconf, automake, libtool-bin, python-dev, java-sdk
|
||||
Build-Depends: debhelper (>= 5), libghc-haskeline-dev, libghc-mtl-dev, libghc-json-dev, autoconf, automake, libtool-bin, python-dev-is-python3, java-sdk
|
||||
Homepage: http://www.grammaticalframework.org/
|
||||
|
||||
Package: gf
|
||||
|
||||
12
debian/rules
vendored
12
debian/rules
vendored
@@ -16,7 +16,7 @@ override_dh_shlibdeps:
|
||||
override_dh_auto_configure:
|
||||
cd src/runtime/c && bash setup.sh configure --prefix=/usr
|
||||
cd src/runtime/c && bash setup.sh build
|
||||
cabal v1-update
|
||||
cabal update
|
||||
cabal v1-install --only-dependencies
|
||||
cabal v1-configure --prefix=/usr -fserver -fc-runtime --extra-lib-dirs=$(CURDIR)/src/runtime/c/.libs --extra-include-dirs=$(CURDIR)/src/runtime/c
|
||||
|
||||
@@ -24,7 +24,7 @@ SET_LDL=LD_LIBRARY_PATH=$$LD_LIBRARY_PATH:$(CURDIR)/src/runtime/c/.libs
|
||||
|
||||
override_dh_auto_build:
|
||||
cd src/runtime/python && EXTRA_INCLUDE_DIRS=$(CURDIR)/src/runtime/c EXTRA_LIB_DIRS=$(CURDIR)/src/runtime/c/.libs python setup.py build
|
||||
cd src/runtime/java && make CFLAGS="-I$(CURDIR)/src/runtime/c -L$(CURDIR)/src/runtime/c/.libs" INSTALL_PATH=/usr
|
||||
# cd src/runtime/java && make CFLAGS="-I$(CURDIR)/src/runtime/c -L$(CURDIR)/src/runtime/c/.libs" INSTALL_PATH=/usr
|
||||
echo $(SET_LDL)
|
||||
-$(SET_LDL) cabal v1-build
|
||||
|
||||
@@ -32,13 +32,15 @@ override_dh_auto_install:
|
||||
$(SET_LDL) cabal v1-copy --destdir=$(CURDIR)/debian/gf
|
||||
cd src/runtime/c && bash setup.sh copy prefix=$(CURDIR)/debian/gf/usr
|
||||
cd src/runtime/python && python setup.py install --prefix=$(CURDIR)/debian/gf/usr
|
||||
cd src/runtime/java && make INSTALL_PATH=$(CURDIR)/debian/gf/usr install
|
||||
D="`find debian/gf -name site-packages`" && [ -n "$$D" ] && cd $$D && cd .. && mv site-packages dist-packages
|
||||
# cd src/runtime/java && make INSTALL_PATH=$(CURDIR)/debian/gf/usr install
|
||||
# D="`find debian/gf -name dist-packages`" && [ -n "$$D" ] && cd $$D && cd .. && mv dist-packages dist-packages
|
||||
|
||||
override_dh_usrlocal:
|
||||
|
||||
override_dh_auto_clean:
|
||||
rm -fr dist/build
|
||||
-cd src/runtime/python && rm -fr build
|
||||
-cd src/runtime/java && make clean
|
||||
# -cd src/runtime/java && make clean
|
||||
-cd src/runtime/c && make clean
|
||||
|
||||
override_dh_auto_test:
|
||||
|
||||
75
doc/gf-editor-modes.md
Normal file
75
doc/gf-editor-modes.md
Normal file
@@ -0,0 +1,75 @@
|
||||
# Editor modes & IDE integration for GF
|
||||
|
||||
We collect GF modes for various editors on this page. Contributions are welcome!
|
||||
|
||||
## Emacs
|
||||
|
||||
[gf.el](https://github.com/GrammaticalFramework/gf-emacs-mode) by Johan
|
||||
Bockgård provides syntax highlighting and automatic indentation and
|
||||
lets you run the GF Shell in an emacs buffer. See installation
|
||||
instructions inside.
|
||||
|
||||
## Atom
|
||||
|
||||
[language-gf](https://atom.io/packages/language-gf), by John J. Camilleri
|
||||
|
||||
## Visual Studio Code
|
||||
|
||||
* [Grammatical Framework Language Server](https://marketplace.visualstudio.com/items?itemName=anka-213.gf-vscode) by Andreas Källberg.
|
||||
This provides syntax highlighting and a client for the Grammatical Framework language server. Follow the installation instructions in the link.
|
||||
* [Grammatical Framework](https://marketplace.visualstudio.com/items?itemName=GrammaticalFramework.gf-vscode) is a simpler extension
|
||||
without any external dependencies which provides only syntax highlighting.
|
||||
|
||||
## Eclipse
|
||||
|
||||
[GF Eclipse Plugin](https://github.com/GrammaticalFramework/gf-eclipse-plugin/), by John J. Camilleri
|
||||
|
||||
## Gedit
|
||||
|
||||
By John J. Camilleri
|
||||
|
||||
Copy the file below to
|
||||
`~/.local/share/gtksourceview-3.0/language-specs/gf.lang` (under Ubuntu).
|
||||
|
||||
* [gf.lang](../src/tools/gf.lang)
|
||||
|
||||
Some helpful notes/links:
|
||||
|
||||
* The code is based heavily on the `haskell.lang` file which I found in
|
||||
`/usr/share/gtksourceview-2.0/language-specs/haskell.lang`.
|
||||
* Ruslan Osmanov recommends
|
||||
[registering your file extension as its own MIME type](http://osmanov-dev-notes.blogspot.com/2011/04/how-to-add-new-highlight-mode-in-gedit.html)
|
||||
(see also [here](https://help.ubuntu.com/community/AddingMimeTypes)),
|
||||
however on my system the `.gf` extension was already registered
|
||||
as a generic font (`application/x-tex-gf`) and I didn't want to risk
|
||||
messing any of that up.
|
||||
* This is a quick 5-minute job and might require some tweaking.
|
||||
[The GtkSourceView language definition tutorial](http://developer.gnome.org/gtksourceview/stable/lang-tutorial.html)
|
||||
is the place to start looking.
|
||||
* Contributions are welcome!
|
||||
|
||||
## Geany
|
||||
|
||||
By John J. Camilleri
|
||||
|
||||
[Custom filetype](http://www.geany.org/manual/dev/index.html#custom-filetypes)
|
||||
config files for syntax highlighting in [Geany](http://www.geany.org/).
|
||||
|
||||
For version 1.36 and above, copy one of the files below to
|
||||
`/usr/share/geany/filedefs/filetypes.GF.conf` (under Ubuntu).
|
||||
If you're using a version older than 1.36, copy the file to `/usr/share/geany/filetypes.GF.conf`.
|
||||
You will need to manually create the file.
|
||||
|
||||
* [light-filetypes.GF.conf](../src/tools/light-filetypes.GF.conf)
|
||||
* [dark-filetypes.GF.conf](../src/tools/dark-filetypes.GF.conf)
|
||||
|
||||
You will also need to edit the `filetype_extensions.conf` file and add the
|
||||
following line somewhere:
|
||||
|
||||
```
|
||||
GF=*.gf
|
||||
```
|
||||
|
||||
## Vim
|
||||
|
||||
[vim-gf](https://github.com/gdetrez/vim-gf)
|
||||
@@ -1,79 +0,0 @@
|
||||
Editor modes & IDE integration for GF
|
||||
|
||||
|
||||
We collect GF modes for various editors on this page. Contributions are
|
||||
welcome!
|
||||
|
||||
|
||||
==Emacs==
|
||||
|
||||
[gf.el https://github.com/GrammaticalFramework/gf-emacs-mode] by Johan
|
||||
Bockgård provides syntax highlighting and automatic indentation and
|
||||
lets you run the GF Shell in an emacs buffer. See installation
|
||||
instructions inside.
|
||||
|
||||
==Atom==
|
||||
[language-gf https://atom.io/packages/language-gf], by John J. Camilleri
|
||||
|
||||
==Visual Studio Code==
|
||||
|
||||
- [Grammatical Framework Language Server https://marketplace.visualstudio.com/items?itemName=anka-213.gf-vscode] by Andreas Källberg.
|
||||
This provides syntax highlighting and a client for the Grammatical Framework language server. Follow the installation instructions in the link.
|
||||
- [Grammatical Framework https://marketplace.visualstudio.com/items?itemName=GrammaticalFramework.gf-vscode] is a simpler extension
|
||||
without any external dependencies which provides only syntax highlighting.
|
||||
|
||||
==Eclipse==
|
||||
|
||||
[GF Eclipse Plugin https://github.com/GrammaticalFramework/gf-eclipse-plugin/], by John J. Camilleri
|
||||
|
||||
==Gedit==
|
||||
|
||||
By John J. Camilleri
|
||||
|
||||
Copy the file below to
|
||||
``~/.local/share/gtksourceview-3.0/language-specs/gf.lang`` (under Ubuntu).
|
||||
|
||||
- [gf.lang ../src/tools/gf.lang]
|
||||
|
||||
|
||||
Some helpful notes/links:
|
||||
|
||||
- The code is based heavily on the ``haskell.lang`` file which I found in
|
||||
``/usr/share/gtksourceview-2.0/language-specs/haskell.lang``.
|
||||
- Ruslan Osmanov recommends
|
||||
[registering your file extension as its own MIME type http://osmanov-dev-notes.blogspot.com/2011/04/how-to-add-new-highlight-mode-in-gedit.html]
|
||||
(see also [here https://help.ubuntu.com/community/AddingMimeTypes]),
|
||||
however on my system the ``.gf`` extension was already registered
|
||||
as a generic font (``application/x-tex-gf``) and I didn't want to risk
|
||||
messing any of that up.
|
||||
- This is a quick 5-minute job and might require some tweaking.
|
||||
[The GtkSourceView language definition tutorial http://developer.gnome.org/gtksourceview/stable/lang-tutorial.html]
|
||||
is the place to start looking.
|
||||
- Contributions are welcome!
|
||||
|
||||
|
||||
==Geany==
|
||||
|
||||
By John J. Camilleri
|
||||
|
||||
[Custom filetype http://www.geany.org/manual/dev/index.html#custom-filetypes]
|
||||
config files for syntax highlighting in [Geany http://www.geany.org/].
|
||||
|
||||
Copy one of the files below to ``/usr/share/geany/filetypes.GF.conf``
|
||||
(under Ubuntu). You will need to manually create the file.
|
||||
|
||||
- [light-filetypes.GF.conf ../src/tools/light-filetypes.GF.conf]
|
||||
- [dark-filetypes.GF.conf ../src/tools/dark-filetypes.GF.conf]
|
||||
|
||||
|
||||
You will also need to edit the ``filetype_extensions.conf`` file and add the
|
||||
following line somewhere:
|
||||
|
||||
```
|
||||
GF=*.gf
|
||||
```
|
||||
|
||||
|
||||
==Vim==
|
||||
|
||||
[vim-gf https://github.com/gdetrez/vim-gf]
|
||||
@@ -46,7 +46,7 @@
|
||||
#TINY
|
||||
|
||||
The command has one argument which is either function, expression or
|
||||
a category defined in the abstract syntax of the current grammar.
|
||||
a category defined in the abstract syntax of the current grammar.
|
||||
If the argument is a function then its type is printed out.
|
||||
If it is a category then the category definition is printed.
|
||||
If a whole expression is given it prints the expression with refined
|
||||
@@ -303,7 +303,7 @@ but the resulting .gf file must be imported separately.
|
||||
|
||||
#TINY
|
||||
|
||||
Generates a list of random trees, by default one tree.
|
||||
Generates a list of random trees, by default one tree up to depth 5.
|
||||
If a tree argument is given, the command completes the Tree with values to
|
||||
all metavariables in the tree. The generation can be biased by probabilities,
|
||||
given in a file in the -probs flag.
|
||||
@@ -315,13 +315,14 @@ given in a file in the -probs flag.
|
||||
| ``-cat`` | generation category
|
||||
| ``-lang`` | uses only functions that have linearizations in all these languages
|
||||
| ``-number`` | number of trees generated
|
||||
| ``-depth`` | the maximum generation depth
|
||||
| ``-depth`` | the maximum generation depth (default: 5)
|
||||
| ``-probs`` | file with biased probabilities (format 'f 0.4' one by line)
|
||||
|
||||
- Examples:
|
||||
|
||||
| ``gr`` | one tree in the startcat of the current grammar
|
||||
| ``gr -cat=NP -number=16`` | 16 trees in the category NP
|
||||
| ``gr -cat=NP -depth=2`` | one tree in the category NP, up to depth 2
|
||||
| ``gr -lang=LangHin,LangTha -cat=Cl`` | Cl, both in LangHin and LangTha
|
||||
| ``gr -probs=FILE`` | generate with bias
|
||||
| ``gr (AdjCN ? (UseN ?))`` | generate trees of form (AdjCN ? (UseN ?))
|
||||
@@ -338,8 +339,8 @@ given in a file in the -probs flag.
|
||||
|
||||
#TINY
|
||||
|
||||
Generates all trees of a given category. By default,
|
||||
the depth is limited to 4, but this can be changed by a flag.
|
||||
Generates all trees of a given category. By default,
|
||||
the depth is limited to 5, but this can be changed by a flag.
|
||||
If a Tree argument is given, the command completes the Tree with values
|
||||
to all metavariables in the tree.
|
||||
|
||||
@@ -353,7 +354,7 @@ to all metavariables in the tree.
|
||||
|
||||
- Examples:
|
||||
|
||||
| ``gt`` | all trees in the startcat, to depth 4
|
||||
| ``gt`` | all trees in the startcat, to depth 5
|
||||
| ``gt -cat=NP -number=16`` | 16 trees in the category NP
|
||||
| ``gt -cat=NP -depth=2`` | trees in the category NP to depth 2
|
||||
| ``gt (AdjCN ? (UseN ?))`` | trees of form (AdjCN ? (UseN ?))
|
||||
@@ -582,7 +583,7 @@ trees where a function node is a metavariable.
|
||||
|
||||
- Examples:
|
||||
|
||||
| ``l -lang=LangSwe,LangNor -chunks ? a b (? c d)`` |
|
||||
| ``l -lang=LangSwe,LangNor -chunks ? a b (? c d)`` |
|
||||
|
||||
|
||||
#NORMAL
|
||||
@@ -647,7 +648,7 @@ The -lang flag can be used to restrict this to fewer languages.
|
||||
The default start category can be overridden by the -cat flag.
|
||||
See also the ps command for lexing and character encoding.
|
||||
|
||||
The -openclass flag is experimental and allows some robustness in
|
||||
The -openclass flag is experimental and allows some robustness in
|
||||
the parser. For example if -openclass="A,N,V" is given, the parser
|
||||
will accept unknown adjectives, nouns and verbs with the resource grammar.
|
||||
|
||||
|
||||
@@ -1188,7 +1188,7 @@ use ``generate_trees = gt``.
|
||||
this wine is fresh
|
||||
this wine is warm
|
||||
```
|
||||
The default **depth** is 3; the depth can be
|
||||
The default **depth** is 5; the depth can be
|
||||
set by using the ``depth`` flag:
|
||||
```
|
||||
> generate_trees -depth=2 | l
|
||||
@@ -1265,10 +1265,16 @@ Human eye may prefer to see a visualization: ``visualize_tree = vt``:
|
||||
> parse "this delicious cheese is very Italian" | visualize_tree
|
||||
```
|
||||
The tree is generated in postscript (``.ps``) file. The ``-view`` option is used for
|
||||
telling what command to use to view the file. Its default is ``"open"``, which works
|
||||
on Mac OS X. On Ubuntu Linux, one can write
|
||||
telling what command to use to view the file.
|
||||
|
||||
This works on Mac OS X:
|
||||
```
|
||||
> parse "this delicious cheese is very Italian" | visualize_tree -view="eog"
|
||||
> parse "this delicious cheese is very Italian" | visualize_tree -view=open
|
||||
```
|
||||
On Linux, one can use one of the following commands.
|
||||
```
|
||||
> parse "this delicious cheese is very Italian" | visualize_tree -view=eog
|
||||
> parse "this delicious cheese is very Italian" | visualize_tree -view=xdg-open
|
||||
```
|
||||
|
||||
|
||||
@@ -1733,6 +1739,13 @@ A new module can **extend** an old one:
|
||||
Pizza : Kind ;
|
||||
}
|
||||
```
|
||||
Note that the extended grammar doesn't inherit the start
|
||||
category from the grammar it extends, so if you want to
|
||||
generate sentences with this grammar, you'll have to either
|
||||
add a startcat (e.g. ``flags startcat = Question ;``),
|
||||
or in the GF shell, specify the category to ``generate_random`` or ``generate_trees``
|
||||
(e.g. ``gr -cat=Comment`` or ``gt -cat=Question``).
|
||||
|
||||
Parallel to the abstract syntax, extensions can
|
||||
be built for concrete syntaxes:
|
||||
```
|
||||
@@ -3733,7 +3746,7 @@ However, type-incorrect commands are rejected by the typecheck:
|
||||
The parsing is successful but the type checking failed with error(s):
|
||||
Couldn't match expected type Device light
|
||||
against the inferred type Device fan
|
||||
In the expression: DKindOne fan
|
||||
In the expression: DKindOne fan
|
||||
```
|
||||
|
||||
#NEW
|
||||
@@ -4171,7 +4184,7 @@ division of integers.
|
||||
```
|
||||
abstract Calculator = {
|
||||
flags startcat = Exp ;
|
||||
|
||||
|
||||
cat Exp ;
|
||||
|
||||
fun
|
||||
@@ -4578,7 +4591,7 @@ in any multilingual grammar between any languages in the grammar.
|
||||
module Main where
|
||||
|
||||
import PGF
|
||||
import System (getArgs)
|
||||
import System.Environment (getArgs)
|
||||
|
||||
main :: IO ()
|
||||
main = do
|
||||
|
||||
@@ -139,6 +139,8 @@ stack install
|
||||
For more info on working with the GF source code, see the
|
||||
[GF Developers Guide](../doc/gf-developers.html).
|
||||
|
||||
For macOS Sequoia, you need to downgrade the LLVM package, see instructions [here](https://github.com/GrammaticalFramework/gf-core/issues/172#issuecomment-2599365457).
|
||||
|
||||
## Installing the Python bindings from PyPI
|
||||
|
||||
The Python library is available on PyPI as `pgf`, so it can be installed using:
|
||||
|
||||
184
download/index-3.12.md
Normal file
184
download/index-3.12.md
Normal file
@@ -0,0 +1,184 @@
|
||||
---
|
||||
title: Grammatical Framework Download and Installation
|
||||
date: 8 August 2025
|
||||
---
|
||||
|
||||
**GF 3.12** was released on 8 August 2025.
|
||||
|
||||
What's new? See the [release notes](release-3.12.html).
|
||||
|
||||
#### Note: GF core and the RGL
|
||||
|
||||
The following instructions explain how to install **GF core**, i.e. the compiler, shell and run-time systems.
|
||||
Obtaining the **Resource Grammar Library (RGL)** is done separately; see the section [at the bottom of this page](#installing-the-rgl-from-a-binary-release).
|
||||
|
||||
---
|
||||
|
||||
## Installing from a binary package
|
||||
|
||||
Binary packages are available for Debian/Ubuntu, macOS, and Windows and include:
|
||||
|
||||
- GF shell and grammar compiler
|
||||
- `gf -server` mode
|
||||
- C run-time system
|
||||
- Python bindings to the C run-time system
|
||||
|
||||
[Binary packages on GitHub](https://github.com/GrammaticalFramework/gf-core/releases/tag/3.12)
|
||||
|
||||
#### Debian/Ubuntu
|
||||
|
||||
The package targets Ubuntu 24.04 (Noble).
|
||||
To install it, use:
|
||||
|
||||
```
|
||||
sudo apt install ./gf-3.12-ubuntu-24.04.deb
|
||||
```
|
||||
|
||||
#### macOS
|
||||
|
||||
To install the package, just double-click it and follow the installer instructions.
|
||||
|
||||
#### Windows
|
||||
|
||||
To install the package:
|
||||
|
||||
1. unpack it anywhere and take note of the full path to the folder containing the `.exe` file.
|
||||
2. add it to the `PATH` environment variable
|
||||
|
||||
For more information, see [Using GF on Windows](https://www.grammaticalframework.org/~inari/gf-windows.html) (latest updated for Windows 10).
|
||||
|
||||
## Installing from Hackage
|
||||
|
||||
_Instructions applicable for macOS, Linux, and WSL2 on Windows._
|
||||
|
||||
[GF is on Hackage](http://hackage.haskell.org/package/gf), so under
|
||||
normal circumstances the procedure is fairly simple:
|
||||
|
||||
```
|
||||
cabal update
|
||||
cabal install gf-3.12
|
||||
```
|
||||
|
||||
### Notes
|
||||
|
||||
#### GHC version
|
||||
|
||||
The GF source code is known to be compilable with GHC versions 7.10 through to 9.6.7.
|
||||
|
||||
#### Obtaining Haskell
|
||||
|
||||
There are various ways of obtaining Haskell, including:
|
||||
|
||||
- ghcup
|
||||
1. Install from https://www.haskell.org/ghcup/
|
||||
2. `ghcup install ghc 9.6.7`
|
||||
3. `ghcup set ghc 9.6.7`
|
||||
- Stack: https://haskellstack.org/
|
||||
|
||||
|
||||
#### Installation location
|
||||
|
||||
The above steps install GF for a single user.
|
||||
The executables are put in `$HOME/.cabal/bin` (or on macOS in `$HOME/Library/Haskell/bin`),
|
||||
so you might want to add this directory to your path (in `.bash_profile` or similar):
|
||||
|
||||
```
|
||||
PATH=$HOME/.cabal/bin:$PATH
|
||||
```
|
||||
|
||||
#### Haskeline
|
||||
|
||||
GF uses [`haskeline`](http://hackage.haskell.org/package/haskeline), which
|
||||
on Linux depends on some non-Haskell libraries that won't be installed
|
||||
automatically by Cabal, and therefore need to be installed manually.
|
||||
Here is one way to do this:
|
||||
|
||||
- On Ubuntu: `sudo apt-get install libghc-haskeline-dev`
|
||||
- On Fedora: `sudo dnf install ghc-haskeline-devel`
|
||||
|
||||
## Installing from source code
|
||||
|
||||
### Obtaining
|
||||
|
||||
To obtain the source code for the **release**,
|
||||
download it from [GitHub](https://github.com/GrammaticalFramework/gf-core/releases).
|
||||
|
||||
Alternatively, to obtain the **latest version** of the source code:
|
||||
|
||||
1. If you haven't already, clone the repository with:
|
||||
```
|
||||
git clone https://github.com/GrammaticalFramework/gf-core.git
|
||||
```
|
||||
2. If you've already cloned the repository previously, update with:
|
||||
```
|
||||
git pull
|
||||
```
|
||||
|
||||
### Installing
|
||||
|
||||
You can then install with:
|
||||
```
|
||||
cabal install
|
||||
```
|
||||
|
||||
or, if you're a Stack user:
|
||||
|
||||
```
|
||||
stack install
|
||||
```
|
||||
|
||||
<!--The above notes for installing from source apply also in these cases.-->
|
||||
For more info on working with the GF source code, see the
|
||||
[GF Developers Guide](../doc/gf-developers.html).
|
||||
|
||||
## Installing the Python bindings from PyPI
|
||||
|
||||
The Python library is available on PyPI as `pgf`, so it can be installed using:
|
||||
|
||||
```
|
||||
pip install pgf
|
||||
```
|
||||
|
||||
If this doesn't work, you will need to install the C runtime manually; see the instructions [here](https://www.grammaticalframework.org/doc/gf-developers.html#toc12).
|
||||
|
||||
---
|
||||
|
||||
## Installing the RGL from a binary release
|
||||
|
||||
Binary releases of the RGL are made available on [GitHub](https://github.com/GrammaticalFramework/gf-rgl/releases).
|
||||
In general the steps to follow are:
|
||||
|
||||
1. Download a binary release and extract it somewhere on your system.
|
||||
2. Set the environment variable `GF_LIB_PATH` to point to wherever you extracted the RGL.
|
||||
|
||||
For more information, see [Using GF on Windows](https://www.grammaticalframework.org/~inari/gf-windows.html) (latest updated for Windows 10).
|
||||
|
||||
## Installing the RGL from source
|
||||
|
||||
To compile the RGL, you will need to have GF already installed and in your path.
|
||||
|
||||
1. Obtain the RGL source code, either by:
|
||||
- cloning with `git clone https://github.com/GrammaticalFramework/gf-rgl.git`
|
||||
- downloading a source archive [here](https://github.com/GrammaticalFramework/gf-rgl/archive/master.zip)
|
||||
2. Run `make` in the source code folder.
|
||||
|
||||
For more options, see the [RGL README](https://github.com/GrammaticalFramework/gf-rgl/blob/master/README.md).
|
||||
|
||||
---
|
||||
|
||||
## Older releases
|
||||
|
||||
- [GF 3.11](index-3.11.html) (July 2021)
|
||||
- [GF 3.10](index-3.10.html) (December 2018)
|
||||
- [GF 3.9](index-3.9.html) (August 2017)
|
||||
- [GF 3.8](index-3.8.html) (June 2016)
|
||||
- [GF 3.7.1](index-3.7.1.html) (October 2015)
|
||||
- [GF 3.7](index-3.7.html) (June 2015)
|
||||
- [GF 3.6](index-3.6.html) (June 2014)
|
||||
- [GF 3.5](index-3.5.html) (August 2013)
|
||||
- [GF 3.4](index-3.4.html) (January 2013)
|
||||
- [GF 3.3.3](index-3.3.3.html) (March 2012)
|
||||
- [GF 3.3](index-3.3.html) (October 2011)
|
||||
- [GF 3.2.9](index-3.2.9.html) source-only snapshot (September 2011)
|
||||
- [GF 3.2](index-3.2.html) (December 2010)
|
||||
- [GF 3.1.6](index-3.1.6.html) (April 2010)
|
||||
@@ -3,6 +3,6 @@
|
||||
<meta http-equiv="refresh" content="0; URL=/download/index-3.11.html" />
|
||||
</head>
|
||||
<body>
|
||||
You are being redirected to <a href="index-3.11.html">the current version</a> of this page.
|
||||
You are being redirected to <a href="index-3.12.html">the current version</a> of this page.
|
||||
</body>
|
||||
</html>
|
||||
|
||||
37
download/release-3.12.md
Normal file
37
download/release-3.12.md
Normal file
@@ -0,0 +1,37 @@
|
||||
---
|
||||
title: GF 3.12 Release Notes
|
||||
date: 08 August 2025
|
||||
---
|
||||
|
||||
## Installation
|
||||
|
||||
See the [download page](index-3.12.html).
|
||||
|
||||
## What's new
|
||||
This release adds support for Apple Silicon M1 Mac computers and newer versions of GHC, along with various improvements and bug fixes.
|
||||
|
||||
Over 70 commits have been merged to gf-core since the release of GF 3.11 in July 2021.
|
||||
|
||||
## General
|
||||
- Support for ARM, allowing to run GF on Mac computers with Apple Silicon M1
|
||||
- Support for newer versions of GHC (8.10.7, 9.0.2, 9.2.4, 9.4, 9.6.7)
|
||||
- Support compiling with Nix
|
||||
- Better error messages
|
||||
- Improvements to several GF shell commands
|
||||
- Several bug fixes and performance improvements
|
||||
- Temporarily dropped support for Java bindings
|
||||
|
||||
## GF compiler and run-time library
|
||||
- Syntactic sugar for table update: `table {cases ; vvv => t \! vvv}.t` can now be written as `t ** { cases }`
|
||||
- Adjust the `-view` command depending on the OS
|
||||
- Improve output of the `visualize_dependencies` (`vd`) command for large dependency trees
|
||||
- Reintroduce syntactic transfer with `pt -transfer` and fix a bug in `pt -compute`
|
||||
- Bug fix: apply `gt` to all arguments when piped
|
||||
- Fix many "Invalid character" messages by always encoding GF files in UTF-8
|
||||
- Improve performance with long extend-lists
|
||||
- Improve syntax error messages
|
||||
- Add support for BIND tokens in the Python bindings
|
||||
- Allow compilation with emscripten
|
||||
|
||||
## Other
|
||||
- Add support for Visual Studio Code
|
||||
43
flake.lock
generated
Normal file
43
flake.lock
generated
Normal file
@@ -0,0 +1,43 @@
|
||||
{
|
||||
"nodes": {
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1704290814,
|
||||
"narHash": "sha256-LWvKHp7kGxk/GEtlrGYV68qIvPHkU9iToomNFGagixU=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "70bdadeb94ffc8806c0570eb5c2695ad29f0e421",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-23.05",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"nixpkgs": "nixpkgs",
|
||||
"systems": "systems"
|
||||
}
|
||||
},
|
||||
"systems": {
|
||||
"locked": {
|
||||
"lastModified": 1681028828,
|
||||
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "nix-systems",
|
||||
"repo": "default",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
||||
50
flake.nix
Normal file
50
flake.nix
Normal file
@@ -0,0 +1,50 @@
|
||||
{
|
||||
inputs = {
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05";
|
||||
systems.url = "github:nix-systems/default";
|
||||
};
|
||||
|
||||
nixConfig = {
|
||||
# extra-trusted-public-keys =
|
||||
# "devenv.cachix.org-1:w1cLUi8dv3hnoSPGAuibQv+f9TZLr6cv/Hm9XgU50cw=";
|
||||
# extra-substituters = "https://devenv.cachix.org";
|
||||
};
|
||||
|
||||
outputs = { self, nixpkgs, systems, ... }@inputs:
|
||||
let forEachSystem = nixpkgs.lib.genAttrs (import systems);
|
||||
in {
|
||||
packages = forEachSystem (system:
|
||||
let
|
||||
pkgs = nixpkgs.legacyPackages.${system};
|
||||
haskellPackages = pkgs.haskell.packages.ghc925.override {
|
||||
overrides = self: _super: {
|
||||
cgi = pkgs.haskell.lib.unmarkBroken (pkgs.haskell.lib.dontCheck
|
||||
(self.callHackage "cgi" "3001.5.0.1" { }));
|
||||
};
|
||||
};
|
||||
|
||||
in {
|
||||
gf = pkgs.haskell.lib.overrideCabal
|
||||
(haskellPackages.callCabal2nixWithOptions "gf" self "--flag=-server"
|
||||
{ }) (_old: {
|
||||
# Fix utf8 encoding problems
|
||||
patches = [
|
||||
# Already applied in master
|
||||
# (
|
||||
# pkgs.fetchpatch {
|
||||
# url = "https://github.com/anka-213/gf-core/commit/6f1ca05fddbcbc860898ddf10a557b513dfafc18.patch";
|
||||
# sha256 = "17vn3hncxm1dwbgpfmrl6gk6wljz3r28j191lpv5zx741pmzgbnm";
|
||||
# }
|
||||
# )
|
||||
./nix/expose-all.patch
|
||||
./nix/revert-new-cabal-madness.patch
|
||||
];
|
||||
jailbreak = true;
|
||||
# executableSystemDepends = [
|
||||
# (pkgs.ncurses.override { enableStatic = true; })
|
||||
# ];
|
||||
# executableHaskellDepends = [ ];
|
||||
});
|
||||
});
|
||||
};
|
||||
}
|
||||
@@ -2,7 +2,7 @@ concrete FoodIta of Food = {
|
||||
lincat
|
||||
Comment, Item, Kind, Quality = Str ;
|
||||
lin
|
||||
Pred item quality = item ++ "è" ++ quality ;
|
||||
Pred item quality = item ++ "è" ++ quality ;
|
||||
This kind = "questo" ++ kind ;
|
||||
That kind = "quel" ++ kind ;
|
||||
Mod quality kind = kind ++ quality ;
|
||||
|
||||
@@ -32,5 +32,5 @@ resource ResIta = open Prelude in {
|
||||
in
|
||||
adjective nero (ner+"a") (ner+"i") (ner+"e") ;
|
||||
copula : Number => Str =
|
||||
table {Sg => "è" ; Pl => "sono"} ;
|
||||
table {Sg => "è" ; Pl => "sono"} ;
|
||||
}
|
||||
|
||||
@@ -8,13 +8,13 @@ instance LexFoodsFin of LexFoods =
|
||||
cheese_N = mkN "juusto" ;
|
||||
fish_N = mkN "kala" ;
|
||||
fresh_A = mkA "tuore" ;
|
||||
warm_A = mkA
|
||||
(mkN "lämmin" "lämpimän" "lämmintä" "lämpimänä" "lämpimään"
|
||||
"lämpiminä" "lämpimiä" "lämpimien" "lämpimissä" "lämpimiin"
|
||||
)
|
||||
"lämpimämpi" "lämpimin" ;
|
||||
warm_A = mkA
|
||||
(mkN "lämmin" "lämpimän" "lämmintä" "lämpimänä" "lämpimään"
|
||||
"lämpiminä" "lämpimiä" "lämpimien" "lämpimissä" "lämpimiin"
|
||||
)
|
||||
"lämpimämpi" "lämpimin" ;
|
||||
italian_A = mkA "italialainen" ;
|
||||
expensive_A = mkA "kallis" ;
|
||||
delicious_A = mkA "herkullinen" ;
|
||||
boring_A = mkA "tylsä" ;
|
||||
boring_A = mkA "tylsä" ;
|
||||
}
|
||||
|
||||
@@ -1,16 +1,16 @@
|
||||
-- (c) 2009 Aarne Ranta under LGPL
|
||||
|
||||
instance LexFoodsGer of LexFoods =
|
||||
instance LexFoodsGer of LexFoods =
|
||||
open SyntaxGer, ParadigmsGer in {
|
||||
oper
|
||||
wine_N = mkN "Wein" ;
|
||||
pizza_N = mkN "Pizza" "Pizzen" feminine ;
|
||||
cheese_N = mkN "Käse" "Käse" masculine ;
|
||||
cheese_N = mkN "Käse" "Käse" masculine ;
|
||||
fish_N = mkN "Fisch" ;
|
||||
fresh_A = mkA "frisch" ;
|
||||
warm_A = mkA "warm" "wärmer" "wärmste" ;
|
||||
warm_A = mkA "warm" "wärmer" "wärmste" ;
|
||||
italian_A = mkA "italienisch" ;
|
||||
expensive_A = mkA "teuer" ;
|
||||
delicious_A = mkA "köstlich" ;
|
||||
delicious_A = mkA "köstlich" ;
|
||||
boring_A = mkA "langweilig" ;
|
||||
}
|
||||
|
||||
@@ -7,10 +7,10 @@ instance LexFoodsSwe of LexFoods =
|
||||
pizza_N = mkN "pizza" ;
|
||||
cheese_N = mkN "ost" ;
|
||||
fish_N = mkN "fisk" ;
|
||||
fresh_A = mkA "färsk" ;
|
||||
fresh_A = mkA "färsk" ;
|
||||
warm_A = mkA "varm" ;
|
||||
italian_A = mkA "italiensk" ;
|
||||
expensive_A = mkA "dyr" ;
|
||||
delicious_A = mkA "läcker" ;
|
||||
boring_A = mkA "tråkig" ;
|
||||
delicious_A = mkA "läcker" ;
|
||||
boring_A = mkA "tråkig" ;
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ concrete QueryFin of Query = {
|
||||
Odd = pred "pariton" ;
|
||||
Prime = pred "alkuluku" ;
|
||||
Number i = i.s ;
|
||||
Yes = "kyllä" ;
|
||||
Yes = "kyllä" ;
|
||||
No = "ei" ;
|
||||
oper
|
||||
pred : Str -> Str -> Str = \f,x -> "onko" ++ x ++ f ;
|
||||
|
||||
@@ -43,10 +43,10 @@ oper
|
||||
} ;
|
||||
|
||||
auxVerb : Aux -> Verb = \a -> case a of {
|
||||
Avere =>
|
||||
Avere =>
|
||||
mkVerb "avere" "ho" "hai" "ha" "abbiamo" "avete" "hanno" "avuto" Avere ;
|
||||
Essere =>
|
||||
mkVerb "essere" "sono" "sei" "è" "siamo" "siete" "sono" "stato" Essere
|
||||
Essere =>
|
||||
mkVerb "essere" "sono" "sei" "è" "siamo" "siete" "sono" "stato" Essere
|
||||
} ;
|
||||
|
||||
agrPart : Verb -> Agr -> ClitAgr -> Str = \v,a,c -> case v.aux of {
|
||||
|
||||
45
gf.cabal
45
gf.cabal
@@ -1,8 +1,8 @@
|
||||
name: gf
|
||||
version: 3.11.0-git
|
||||
version: 3.12.0
|
||||
|
||||
cabal-version: 1.22
|
||||
build-type: Custom
|
||||
build-type: Simple
|
||||
license: OtherLicense
|
||||
license-file: LICENSE
|
||||
category: Natural Language Processing, Compiler
|
||||
@@ -11,7 +11,7 @@ description: GF, Grammatical Framework, is a programming language for multilingu
|
||||
maintainer: John J. Camilleri <john@digitalgrammars.com>
|
||||
homepage: https://www.grammaticalframework.org/
|
||||
bug-reports: https://github.com/GrammaticalFramework/gf-core/issues
|
||||
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.10.4, GHC==9.0.2
|
||||
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.10.4, GHC==9.0.2, GHC==9.2.4
|
||||
|
||||
data-dir: src
|
||||
extra-source-files:
|
||||
@@ -44,14 +44,6 @@ data-files:
|
||||
www/translator/*.css
|
||||
www/translator/*.js
|
||||
|
||||
custom-setup
|
||||
setup-depends:
|
||||
base >= 4.9.1 && < 4.16,
|
||||
Cabal >= 1.22.0.0,
|
||||
directory >= 1.3.0 && < 1.4,
|
||||
filepath >= 1.4.1 && < 1.5,
|
||||
process >= 1.0.1.1 && < 1.7
|
||||
|
||||
source-repository head
|
||||
type: git
|
||||
location: https://github.com/GrammaticalFramework/gf-core.git
|
||||
@@ -81,20 +73,20 @@ library
|
||||
build-depends:
|
||||
-- GHC 8.0.2 to GHC 8.10.4
|
||||
array >= 0.5.1 && < 0.6,
|
||||
base >= 4.9.1 && < 4.16,
|
||||
bytestring >= 0.10.8 && < 0.11,
|
||||
base >= 4.9.1 && < 4.22,
|
||||
bytestring >= 0.10.8 && < 0.12,
|
||||
containers >= 0.5.7 && < 0.7,
|
||||
exceptions >= 0.8.3 && < 0.11,
|
||||
ghc-prim >= 0.5.0 && < 0.7.1,
|
||||
mtl >= 2.2.1 && < 2.3,
|
||||
ghc-prim >= 0.5.0 && <= 0.10.0,
|
||||
mtl >= 2.2.1 && <= 2.3.1,
|
||||
pretty >= 1.1.3 && < 1.2,
|
||||
random >= 1.1 && < 1.3,
|
||||
utf8-string >= 1.0.1.1 && < 1.1,
|
||||
-- We need transformers-compat >= 0.6.3, but that is only in newer snapshots where it is redundant.
|
||||
transformers-compat >= 0.5.1.4 && < 0.7
|
||||
utf8-string >= 1.0.1.1 && < 1.1
|
||||
|
||||
if impl(ghc<8.0)
|
||||
build-depends:
|
||||
-- We need this in order for ghc-7.10 to build
|
||||
transformers-compat >= 0.6.3 && < 0.7,
|
||||
fail >= 4.9.0 && < 4.10
|
||||
|
||||
hs-source-dirs: src/runtime/haskell
|
||||
@@ -163,10 +155,11 @@ library
|
||||
directory >= 1.3.0 && < 1.4,
|
||||
filepath >= 1.4.1 && < 1.5,
|
||||
haskeline >= 0.7.3 && < 0.9,
|
||||
json >= 0.9.1 && < 0.11,
|
||||
json >= 0.9.1 && <= 0.11,
|
||||
parallel >= 3.2.1.1 && < 3.3,
|
||||
process >= 1.4.3 && < 1.7,
|
||||
time >= 1.6.0 && < 1.10
|
||||
time >= 1.6.0 && <= 1.12.2,
|
||||
template-haskell >= 2.13.0.0
|
||||
|
||||
hs-source-dirs: src/compiler
|
||||
exposed-modules:
|
||||
@@ -354,8 +347,14 @@ library
|
||||
Win32 >= 2.3.1.1 && < 2.7
|
||||
else
|
||||
build-depends:
|
||||
terminfo >=0.4.0 && < 0.5,
|
||||
unix >= 2.7.2 && < 2.8
|
||||
terminfo >=0.4.0 && < 0.5
|
||||
|
||||
if impl(ghc >= 9.6.6)
|
||||
build-depends: unix >= 2.8
|
||||
|
||||
else
|
||||
build-depends: unix >= 2.7.2 && < 2.8
|
||||
|
||||
|
||||
if impl(ghc>=8.2)
|
||||
ghc-options: -fhide-source-paths
|
||||
@@ -400,7 +399,7 @@ test-suite gf-tests
|
||||
main-is: run.hs
|
||||
hs-source-dirs: testsuite
|
||||
build-depends:
|
||||
base >= 4.9.1 && < 4.16,
|
||||
base >= 4.9.1,
|
||||
Cabal >= 1.8,
|
||||
directory >= 1.3.0 && < 1.4,
|
||||
filepath >= 1.4.1 && < 1.5,
|
||||
|
||||
33
index.html
33
index.html
@@ -57,6 +57,7 @@
|
||||
<li><a href="doc/gf-shell-reference.html">Shell Reference</a></li>
|
||||
<li><a href="http://www.molto-project.eu/sites/default/files/MOLTO_D2.3.pdf">Best Practices</a> <small>[PDF]</small></li>
|
||||
<li><a href="https://www.mitpressjournals.org/doi/pdf/10.1162/COLI_a_00378">Scaling Up (Computational Linguistics 2020)</a></li>
|
||||
<li><a href="https://inariksit.github.io/blog/">GF blog</a></li>
|
||||
</ul>
|
||||
|
||||
<a href="lib/doc/synopsis/index.html" class="btn btn-primary ml-3">
|
||||
@@ -86,11 +87,6 @@
|
||||
<h3>Contribute</h3>
|
||||
<ul class="mb-2">
|
||||
<li>
|
||||
<a href="https://web.libera.chat/?channels=#gf">
|
||||
<i class="fas fa-hashtag"></i>
|
||||
IRC
|
||||
</a>
|
||||
/
|
||||
<a href="https://discord.gg/EvfUsjzmaz">
|
||||
<i class="fab fa-discord"></i>
|
||||
Discord
|
||||
@@ -104,7 +100,7 @@
|
||||
</li>
|
||||
<li><a href="https://groups.google.com/group/gf-dev">Mailing List</a></li>
|
||||
<li><a href="https://github.com/GrammaticalFramework/gf-core/issues">Issue Tracker</a></li>
|
||||
<li><a href="//school.grammaticalframework.org/2020/">Summer School</a></li>
|
||||
<li><a href="//school.grammaticalframework.org/">Summer School</a></li>
|
||||
<li><a href="doc/gf-people.html">Authors</a></li>
|
||||
</ul>
|
||||
<a href="https://github.com/GrammaticalFramework/" class="btn btn-primary ml-3">
|
||||
@@ -231,14 +227,10 @@ least one, it may help you to get a first idea of what GF is.
|
||||
</p>
|
||||
|
||||
<p>
|
||||
We run the IRC channel <strong><code>#gf</code></strong> on the Libera network, where you are welcome to look for help with small questions or just start a general discussion.
|
||||
You can <a href="https://web.libera.chat/?channels=#gf">open a web chat</a>
|
||||
or <a href="https://www.grammaticalframework.org/irc/?C=M;O=D">browse the channel logs</a>.
|
||||
</p>
|
||||
<p>
|
||||
There is also a <a href="https://discord.gg/EvfUsjzmaz">GF server on Discord</a>.
|
||||
We run the <a href="https://discord.gg/EvfUsjzmaz">GF server on Discord</a>, where you are welcome to look for help with small questions or just start a general discussion.
|
||||
</p>
|
||||
|
||||
|
||||
<p>
|
||||
For bug reports and feature requests, please create an issue in the
|
||||
<a href="https://github.com/GrammaticalFramework/gf-core/issues">GF Core</a> or
|
||||
@@ -253,6 +245,19 @@ least one, it may help you to get a first idea of what GF is.
|
||||
<div class="col-md-6">
|
||||
<h2>News</h2>
|
||||
<dl class="row">
|
||||
<dt class="col-sm-3 text-center text-nowrap">2025-08-08</dt>
|
||||
<dd class="col-sm-9">
|
||||
<strong>GF 3.12 released.</strong>
|
||||
<a href="download/release-3.12.html">Release notes</a>
|
||||
</dd>
|
||||
<dt class="col-sm-3 text-center text-nowrap">2025-01-18</dt>
|
||||
<dd class="col-sm-9">
|
||||
<a href="//school.grammaticalframework.org/2025/">9th GF Summer School</a>, in Gothenburg, Sweden, 18 – 29 August 2025.
|
||||
</dd>
|
||||
<dt class="col-sm-3 text-center text-nowrap">2023-01-24</dt>
|
||||
<dd class="col-sm-9">
|
||||
<a href="//school.grammaticalframework.org/2023/">8th GF Summer School</a>, in Tampere, Finland, 14 – 25 August 2023.
|
||||
</dd>
|
||||
<dt class="col-sm-3 text-center text-nowrap">2021-07-25</dt>
|
||||
<dd class="col-sm-9">
|
||||
<strong>GF 3.11 released.</strong>
|
||||
@@ -262,10 +267,6 @@ least one, it may help you to get a first idea of what GF is.
|
||||
<dd class="col-sm-9">
|
||||
<a href="https://cloud.grammaticalframework.org/wordnet/">GF WordNet</a> now supports languages for which there are no other WordNets. New additions: Afrikaans, German, Korean, Maltese, Polish, Somali, Swahili.
|
||||
</dd>
|
||||
<dt class="col-sm-3 text-center text-nowrap">2021-03-01</dt>
|
||||
<dd class="col-sm-9">
|
||||
<a href="//school.grammaticalframework.org/2020/">Seventh GF Summer School</a>, in Singapore and online, 26 July – 6 August 2021.
|
||||
</dd>
|
||||
<dt class="col-sm-3 text-center text-nowrap">2020-09-29</dt>
|
||||
<dd class="col-sm-9">
|
||||
<a href="https://www.mitpressjournals.org/doi/pdf/10.1162/COLI_a_00378">Abstract Syntax as Interlingua</a>: Scaling Up the Grammatical Framework from Controlled Languages to Robust Pipelines. A paper in Computational Linguistics (2020) summarizing much of the development in GF in the past ten years.
|
||||
|
||||
12
nix/expose-all.patch
Normal file
12
nix/expose-all.patch
Normal file
@@ -0,0 +1,12 @@
|
||||
diff --git a/gf.cabal b/gf.cabal
|
||||
index 0076e7638..8d3fe4b49 100644
|
||||
--- a/gf.cabal
|
||||
+++ b/gf.cabal
|
||||
@@ -168,7 +168,6 @@ Library
|
||||
GF.Text.Lexing
|
||||
GF.Grammar.Canonical
|
||||
|
||||
- other-modules:
|
||||
GF.Main
|
||||
GF.Compiler
|
||||
GF.Interactive
|
||||
193
nix/revert-new-cabal-madness.patch
Normal file
193
nix/revert-new-cabal-madness.patch
Normal file
@@ -0,0 +1,193 @@
|
||||
commit 45e5473fcd5707af93646d9a116867a4d4e3e9c9
|
||||
Author: Andreas Källberg <anka.213@gmail.com>
|
||||
Date: Mon Oct 10 14:57:12 2022 +0200
|
||||
|
||||
Revert "workaround for the Nix madness"
|
||||
|
||||
This reverts commit 1294269cd60f3db7b056135104615625baeb528c.
|
||||
|
||||
There are easier workarounds, like using
|
||||
|
||||
cabal v1-build
|
||||
|
||||
etc. instead of just `cabal build`
|
||||
|
||||
These changes also broke a whole bunch of other stuff
|
||||
|
||||
diff --git a/README.md b/README.md
|
||||
index ba35795a4..79e6ab68f 100644
|
||||
--- a/README.md
|
||||
+++ b/README.md
|
||||
@@ -38,21 +38,6 @@ or:
|
||||
```
|
||||
stack install
|
||||
```
|
||||
-Note that if you are unlucky to have Cabal 3.0 or later, then it uses
|
||||
-the so-called Nix style commands. Using those for GF development is
|
||||
-a pain. Every time when you change something in the source code, Cabal
|
||||
-will generate a new folder for GF to look for the GF libraries and
|
||||
-the GF cloud. Either reinstall everything with every change in the
|
||||
-compiler, or be sane and stop using cabal-install. Instead you can do:
|
||||
-```
|
||||
-runghc Setup.hs configure
|
||||
-runghc Setup.hs build
|
||||
-sudo runghc Setup.hs install
|
||||
-```
|
||||
-The script will install the GF dependencies globally. The only solution
|
||||
-to the Nix madness that I found is radical:
|
||||
-
|
||||
- "No person, no problem" (Нет человека – нет проблемы).
|
||||
|
||||
For more information, including links to precompiled binaries, see the [download page](https://www.grammaticalframework.org/download/index.html).
|
||||
|
||||
diff --git a/Setup.hs b/Setup.hs
|
||||
index 58dc3e0c6..f8309cc00 100644
|
||||
--- a/Setup.hs
|
||||
+++ b/Setup.hs
|
||||
@@ -4,68 +4,42 @@ import Distribution.Simple.LocalBuildInfo(LocalBuildInfo(..),absoluteInstallDirs
|
||||
import Distribution.Simple.Setup(BuildFlags(..),Flag(..),InstallFlags(..),CopyDest(..),CopyFlags(..),SDistFlags(..))
|
||||
import Distribution.PackageDescription(PackageDescription(..),emptyHookedBuildInfo)
|
||||
import Distribution.Simple.BuildPaths(exeExtension)
|
||||
-import System.Directory
|
||||
import System.FilePath((</>),(<.>))
|
||||
-import System.Process
|
||||
-import Control.Monad(forM_,unless)
|
||||
-import Control.Exception(bracket_)
|
||||
-import Data.Char(isSpace)
|
||||
|
||||
import WebSetup
|
||||
|
||||
+-- | Notice about RGL not built anymore
|
||||
+noRGLmsg :: IO ()
|
||||
+noRGLmsg = putStrLn "Notice: the RGL is not built as part of GF anymore. See https://github.com/GrammaticalFramework/gf-rgl"
|
||||
+
|
||||
main :: IO ()
|
||||
main = defaultMainWithHooks simpleUserHooks
|
||||
- { preConf = gfPreConf
|
||||
- , preBuild = gfPreBuild
|
||||
+ { preBuild = gfPreBuild
|
||||
, postBuild = gfPostBuild
|
||||
, preInst = gfPreInst
|
||||
, postInst = gfPostInst
|
||||
, postCopy = gfPostCopy
|
||||
}
|
||||
where
|
||||
- gfPreConf args flags = do
|
||||
- pkgs <- fmap (map (dropWhile isSpace) . tail . lines)
|
||||
- (readProcess "ghc-pkg" ["list"] "")
|
||||
- forM_ dependencies $ \pkg -> do
|
||||
- let name = takeWhile (/='/') (drop 36 pkg)
|
||||
- unless (name `elem` pkgs) $ do
|
||||
- let fname = name <.> ".tar.gz"
|
||||
- callProcess "wget" [pkg,"-O",fname]
|
||||
- callProcess "tar" ["-xzf",fname]
|
||||
- removeFile fname
|
||||
- bracket_ (setCurrentDirectory name) (setCurrentDirectory ".." >> removeDirectoryRecursive name) $ do
|
||||
- exists <- doesFileExist "Setup.hs"
|
||||
- unless exists $ do
|
||||
- writeFile "Setup.hs" (unlines [
|
||||
- "import Distribution.Simple",
|
||||
- "main = defaultMain"
|
||||
- ])
|
||||
- let to_descr = reverse .
|
||||
- (++) (reverse ".cabal") .
|
||||
- drop 1 .
|
||||
- dropWhile (/='-') .
|
||||
- reverse
|
||||
- callProcess "wget" [to_descr pkg, "-O", to_descr name]
|
||||
- callProcess "runghc" ["Setup.hs","configure"]
|
||||
- callProcess "runghc" ["Setup.hs","build"]
|
||||
- callProcess "sudo" ["runghc","Setup.hs","install"]
|
||||
-
|
||||
- preConf simpleUserHooks args flags
|
||||
-
|
||||
- gfPreBuild args = gfPre args . buildDistPref
|
||||
- gfPreInst args = gfPre args . installDistPref
|
||||
+ gfPreBuild args = gfPre args . buildDistPref
|
||||
+ gfPreInst args = gfPre args . installDistPref
|
||||
|
||||
gfPre args distFlag = do
|
||||
return emptyHookedBuildInfo
|
||||
|
||||
gfPostBuild args flags pkg lbi = do
|
||||
+ -- noRGLmsg
|
||||
let gf = default_gf lbi
|
||||
buildWeb gf flags (pkg,lbi)
|
||||
|
||||
gfPostInst args flags pkg lbi = do
|
||||
+ -- noRGLmsg
|
||||
+ saveInstallPath args flags (pkg,lbi)
|
||||
installWeb (pkg,lbi)
|
||||
|
||||
gfPostCopy args flags pkg lbi = do
|
||||
+ -- noRGLmsg
|
||||
+ saveCopyPath args flags (pkg,lbi)
|
||||
copyWeb flags (pkg,lbi)
|
||||
|
||||
-- `cabal sdist` will not make a proper dist archive, for that see `make sdist`
|
||||
@@ -73,16 +47,27 @@ main = defaultMainWithHooks simpleUserHooks
|
||||
gfSDist pkg lbi hooks flags = do
|
||||
return ()
|
||||
|
||||
-dependencies = [
|
||||
- "https://hackage.haskell.org/package/utf8-string-1.0.2/utf8-string-1.0.2.tar.gz",
|
||||
- "https://hackage.haskell.org/package/json-0.10/json-0.10.tar.gz",
|
||||
- "https://hackage.haskell.org/package/network-bsd-2.8.1.0/network-bsd-2.8.1.0.tar.gz",
|
||||
- "https://hackage.haskell.org/package/httpd-shed-0.4.1.1/httpd-shed-0.4.1.1.tar.gz",
|
||||
- "https://hackage.haskell.org/package/exceptions-0.10.5/exceptions-0.10.5.tar.gz",
|
||||
- "https://hackage.haskell.org/package/stringsearch-0.3.6.6/stringsearch-0.3.6.6.tar.gz",
|
||||
- "https://hackage.haskell.org/package/multipart-0.2.1/multipart-0.2.1.tar.gz",
|
||||
- "https://hackage.haskell.org/package/cgi-3001.5.0.0/cgi-3001.5.0.0.tar.gz"
|
||||
- ]
|
||||
+saveInstallPath :: [String] -> InstallFlags -> (PackageDescription, LocalBuildInfo) -> IO ()
|
||||
+saveInstallPath args flags bi = do
|
||||
+ let
|
||||
+ dest = NoCopyDest
|
||||
+ dir = datadir (uncurry absoluteInstallDirs bi dest)
|
||||
+ writeFile dataDirFile dir
|
||||
+
|
||||
+saveCopyPath :: [String] -> CopyFlags -> (PackageDescription, LocalBuildInfo) -> IO ()
|
||||
+saveCopyPath args flags bi = do
|
||||
+ let
|
||||
+ dest = case copyDest flags of
|
||||
+ NoFlag -> NoCopyDest
|
||||
+ Flag d -> d
|
||||
+ dir = datadir (uncurry absoluteInstallDirs bi dest)
|
||||
+ writeFile dataDirFile dir
|
||||
+
|
||||
+-- | Name of file where installation's data directory is recording
|
||||
+-- This is a last-resort way in which the seprate RGL build script
|
||||
+-- can determine where to put the compiled RGL files
|
||||
+dataDirFile :: String
|
||||
+dataDirFile = "DATA_DIR"
|
||||
|
||||
-- | Get path to locally-built gf
|
||||
default_gf :: LocalBuildInfo -> FilePath
|
||||
diff --git a/gf.cabal b/gf.cabal
|
||||
index a055b86be..d00a5b935 100644
|
||||
--- a/gf.cabal
|
||||
+++ b/gf.cabal
|
||||
@@ -2,7 +2,7 @@ name: gf
|
||||
version: 3.11.0-git
|
||||
|
||||
cabal-version: 1.22
|
||||
-build-type: Simple
|
||||
+build-type: Custom
|
||||
license: OtherLicense
|
||||
license-file: LICENSE
|
||||
category: Natural Language Processing, Compiler
|
||||
@@ -44,6 +44,14 @@ data-files:
|
||||
www/translator/*.css
|
||||
www/translator/*.js
|
||||
|
||||
+custom-setup
|
||||
+ setup-depends:
|
||||
+ base >= 4.9.1 && < 4.16,
|
||||
+ Cabal >= 1.22.0.0,
|
||||
+ directory >= 1.3.0 && < 1.4,
|
||||
+ filepath >= 1.4.1 && < 1.5,
|
||||
+ process >= 1.0.1.1 && < 1.7
|
||||
+
|
||||
source-repository head
|
||||
type: git
|
||||
location: https://github.com/GrammaticalFramework/gf-core.git
|
||||
@@ -22,6 +22,7 @@ import GF.Infra.SIO
|
||||
import GF.Command.Abstract
|
||||
import GF.Command.CommandInfo
|
||||
import GF.Command.CommonCommands
|
||||
import qualified GF.Command.CommonCommands as Common
|
||||
import GF.Text.Clitics
|
||||
import GF.Quiz
|
||||
|
||||
@@ -166,14 +167,15 @@ pgfCommands = Map.fromList [
|
||||
synopsis = "generate random trees in the current abstract syntax",
|
||||
syntax = "gr [-cat=CAT] [-number=INT]",
|
||||
examples = [
|
||||
mkEx "gr -- one tree in the startcat of the current grammar",
|
||||
mkEx "gr -cat=NP -number=16 -- 16 trees in the category NP",
|
||||
mkEx "gr -lang=LangHin,LangTha -cat=Cl -- Cl, both in LangHin and LangTha",
|
||||
mkEx "gr -probs=FILE -- generate with bias",
|
||||
mkEx "gr (AdjCN ? (UseN ?)) -- generate trees of form (AdjCN ? (UseN ?))"
|
||||
mkEx $ "gr -- one tree in the startcat of the current grammar, up to depth " ++ Common.default_depth_str,
|
||||
mkEx "gr -cat=NP -number=16 -- 16 trees in the category NP",
|
||||
mkEx "gr -cat=NP -depth=2 -- one tree in the category NP, up to depth 2",
|
||||
mkEx "gr -lang=LangHin,LangTha -cat=Cl -- Cl, both in LangHin and LangTha",
|
||||
mkEx "gr -probs=FILE -- generate with bias",
|
||||
mkEx "gr (AdjCN ? (UseN ?)) -- generate trees of form (AdjCN ? (UseN ?))"
|
||||
],
|
||||
explanation = unlines [
|
||||
"Generates a list of random trees, by default one tree.",
|
||||
"Generates a list of random trees, by default one tree up to depth " ++ Common.default_depth_str ++ ".",
|
||||
"If a tree argument is given, the command completes the Tree with values to",
|
||||
"all metavariables in the tree. The generation can be biased by probabilities,",
|
||||
"given in a file in the -probs flag."
|
||||
@@ -182,13 +184,13 @@ pgfCommands = Map.fromList [
|
||||
("cat","generation category"),
|
||||
("lang","uses only functions that have linearizations in all these languages"),
|
||||
("number","number of trees generated"),
|
||||
("depth","the maximum generation depth"),
|
||||
("depth","the maximum generation depth (default: " ++ Common.default_depth_str ++ ")"),
|
||||
("probs", "file with biased probabilities (format 'f 0.4' one by line)")
|
||||
],
|
||||
exec = getEnv $ \ opts arg (Env pgf mos) -> do
|
||||
pgf <- optProbs opts (optRestricted opts pgf)
|
||||
gen <- newStdGen
|
||||
let dp = valIntOpts "depth" 4 opts
|
||||
let dp = valIntOpts "depth" Common.default_depth opts
|
||||
let ts = case mexp (toExprs arg) of
|
||||
Just ex -> generateRandomFromDepth gen pgf ex (Just dp)
|
||||
Nothing -> generateRandomDepth gen pgf (optType pgf opts) (Just dp)
|
||||
@@ -199,28 +201,28 @@ pgfCommands = Map.fromList [
|
||||
synopsis = "generates a list of trees, by default exhaustive",
|
||||
explanation = unlines [
|
||||
"Generates all trees of a given category. By default, ",
|
||||
"the depth is limited to 4, but this can be changed by a flag.",
|
||||
"the depth is limited to " ++ Common.default_depth_str ++ ", but this can be changed by a flag.",
|
||||
"If a Tree argument is given, the command completes the Tree with values",
|
||||
"to all metavariables in the tree."
|
||||
],
|
||||
flags = [
|
||||
("cat","the generation category"),
|
||||
("depth","the maximum generation depth"),
|
||||
("depth","the maximum generation depth (default: " ++ Common.default_depth_str ++ ")"),
|
||||
("lang","excludes functions that have no linearization in this language"),
|
||||
("number","the number of trees generated")
|
||||
],
|
||||
examples = [
|
||||
mkEx "gt -- all trees in the startcat, to depth 4",
|
||||
mkEx "gt -cat=NP -number=16 -- 16 trees in the category NP",
|
||||
mkEx "gt -cat=NP -depth=2 -- trees in the category NP to depth 2",
|
||||
mkEx "gt (AdjCN ? (UseN ?)) -- trees of form (AdjCN ? (UseN ?))"
|
||||
mkEx $ "gt -- all trees in the startcat, to depth " ++ Common.default_depth_str,
|
||||
mkEx "gt -cat=NP -number=16 -- 16 trees in the category NP",
|
||||
mkEx "gt -cat=NP -depth=2 -- trees in the category NP to depth 2",
|
||||
mkEx "gt (AdjCN ? (UseN ?)) -- trees of form (AdjCN ? (UseN ?))"
|
||||
],
|
||||
exec = getEnv $ \ opts arg (Env pgf mos) -> do
|
||||
let pgfr = optRestricted opts pgf
|
||||
let dp = valIntOpts "depth" 4 opts
|
||||
let ts = case mexp (toExprs arg) of
|
||||
Just ex -> generateFromDepth pgfr ex (Just dp)
|
||||
Nothing -> generateAllDepth pgfr (optType pgf opts) (Just dp)
|
||||
let dp = valIntOpts "depth" Common.default_depth opts
|
||||
let ts = case toExprs arg of
|
||||
[] -> generateAllDepth pgfr (optType pgf opts) (Just dp)
|
||||
es -> concat [generateFromDepth pgfr e (Just dp) | e <- es]
|
||||
returnFromExprs $ take (optNumInf opts) ts
|
||||
}),
|
||||
("i", emptyCommandInfo {
|
||||
@@ -428,7 +430,8 @@ pgfCommands = Map.fromList [
|
||||
"are type checking and semantic computation."
|
||||
],
|
||||
examples = [
|
||||
mkEx "pt -compute (plus one two) -- compute value"
|
||||
mkEx "pt -compute (plus one two) -- compute value",
|
||||
mkEx ("p \"the 4 dogs\" | pt -transfer=digits2numeral | l -- \"the four dogs\" ")
|
||||
],
|
||||
exec = getEnv $ \ opts arg (Env pgf mos) ->
|
||||
returnFromExprs . takeOptNum opts . treeOps pgf opts $ toExprs arg,
|
||||
@@ -546,7 +549,7 @@ pgfCommands = Map.fromList [
|
||||
"which is processed by dot (graphviz) and displayed by the program indicated",
|
||||
"by the view flag. The target format is png, unless overridden by the",
|
||||
"flag -format. Results from multiple trees are combined to pdf with convert (ImageMagick).",
|
||||
"See also 'vp -showdep' for another visualization of dependencies."
|
||||
"See also 'vp -showdep' for another visualization of dependencies."
|
||||
],
|
||||
exec = getEnv $ \ opts arg (Env pgf mos) -> do
|
||||
let absname = abstractName pgf
|
||||
@@ -759,7 +762,7 @@ pgfCommands = Map.fromList [
|
||||
[] -> [parse_ pgf lang (optType pgf opts) (Just dp) s | lang <- optLangs pgf opts]
|
||||
open_typs -> [parseWithRecovery pgf lang (optType pgf opts) open_typs (Just dp) s | lang <- optLangs pgf opts]
|
||||
where
|
||||
dp = valIntOpts "depth" 4 opts
|
||||
dp = valIntOpts "depth" Common.default_depth opts
|
||||
|
||||
fromParse opts = foldr (joinPiped . fromParse1 opts) void
|
||||
|
||||
@@ -799,9 +802,9 @@ pgfCommands = Map.fromList [
|
||||
_ | isOpt "tabtreebank" opts ->
|
||||
return $ concat $ intersperse "\t" $ (showExpr [] t) :
|
||||
[s | lang <- optLangs pgf opts, s <- linear pgf opts lang t]
|
||||
_ | isOpt "chunks" opts -> map snd $ linChunks pgf opts t
|
||||
_ | isOpt "chunks" opts -> map snd $ linChunks pgf opts t
|
||||
_ -> [s | lang <- optLangs pgf opts, s<-linear pgf opts lang t]
|
||||
linChunks pgf opts t =
|
||||
linChunks pgf opts t =
|
||||
[(lang, unwords (intersperse "<+>" (map (unlines . linear pgf opts lang) (treeChunks t)))) | lang <- optLangs pgf opts]
|
||||
|
||||
linear :: PGF -> [Option] -> CId -> Expr -> [String]
|
||||
@@ -1005,13 +1008,13 @@ viewLatex view name grphs = do
|
||||
restrictedSystem $ "pdflatex " ++ texfile
|
||||
restrictedSystem $ view ++ " " ++ pdffile
|
||||
return void
|
||||
|
||||
|
||||
---- copied from VisualizeTree ; not sure about proper place AR Nov 2015
|
||||
latexDoc :: [String] -> String
|
||||
latexDoc body = unlines $
|
||||
"\\batchmode"
|
||||
: "\\documentclass{article}"
|
||||
: "\\usepackage[utf8]{inputenc}"
|
||||
: "\\usepackage[utf8]{inputenc}"
|
||||
: "\\begin{document}"
|
||||
: spaces body
|
||||
++ ["\\end{document}"]
|
||||
|
||||
@@ -19,6 +19,12 @@ import Data.Char (isSpace)
|
||||
|
||||
import qualified PGF as H(showCId,showExpr,toATree,toTrie,Trie(..))
|
||||
|
||||
-- store default generation depth in a variable and use everywhere
|
||||
default_depth :: Int
|
||||
default_depth = 5
|
||||
default_depth_str = show default_depth
|
||||
|
||||
|
||||
extend old new = Map.union (Map.fromList new) old -- Map.union is left-biased
|
||||
|
||||
commonCommands :: (Monad m,MonadSIO m) => Map.Map String (CommandInfo m)
|
||||
|
||||
@@ -5,6 +5,8 @@ module GF.Command.TreeOperations (
|
||||
) where
|
||||
|
||||
import PGF(Expr,PGF,CId,compute,mkApp,unApp,unapply,unMeta,exprSize,exprFunctions)
|
||||
import PGF.Data(Expr(EApp,EFun))
|
||||
import PGF.TypeCheck(inferExpr)
|
||||
import Data.List
|
||||
|
||||
type TreeOp = [Expr] -> [Expr]
|
||||
@@ -16,15 +18,17 @@ allTreeOps :: PGF -> [(String,(String,Either TreeOp (CId -> TreeOp)))]
|
||||
allTreeOps pgf = [
|
||||
("compute",("compute by using semantic definitions (def)",
|
||||
Left $ map (compute pgf))),
|
||||
("transfer",("apply this transfer function to all maximal subtrees of suitable type",
|
||||
Right $ \f -> map (transfer pgf f))), -- HL 12/24, modified from gf-3.3
|
||||
("largest",("sort trees from largest to smallest, in number of nodes",
|
||||
Left $ largest)),
|
||||
("nub",("remove duplicate trees",
|
||||
("nub\t",("remove duplicate trees",
|
||||
Left $ nub)),
|
||||
("smallest",("sort trees from smallest to largest, in number of nodes",
|
||||
Left $ smallest)),
|
||||
("subtrees",("return all fully applied subtrees (stopping at abstractions), by default sorted from the largest",
|
||||
Left $ concatMap subtrees)),
|
||||
("funs",("return all fun functions appearing in the tree, with duplications",
|
||||
("funs\t",("return all fun functions appearing in the tree, with duplications",
|
||||
Left $ \es -> [mkApp f [] | e <- es, f <- exprFunctions e]))
|
||||
]
|
||||
|
||||
@@ -48,3 +52,18 @@ subtrees :: Expr -> [Expr]
|
||||
subtrees t = t : case unApp t of
|
||||
Just (f,ts) -> concatMap subtrees ts
|
||||
_ -> [] -- don't go under abstractions
|
||||
|
||||
-- Apply transfer function f:C -> D to all maximal subtrees s:C of tree e and replace
|
||||
-- these s by the values of f(s). This modifies the 'simple-minded transfer' of gf-3.3.
|
||||
-- If applied to strict subtrees s of e, better use with f:C -> C only. HL 12/2024
|
||||
|
||||
transfer :: PGF -> CId -> Expr -> Expr
|
||||
transfer pgf f e = case inferExpr pgf (appf e) of
|
||||
Left _err -> case e of
|
||||
EApp g a -> EApp (transfer pgf f g) (transfer pgf f a)
|
||||
_ -> e
|
||||
Right _ty -> case (compute pgf (appf e)) of
|
||||
v | v /= (appf e) -> v
|
||||
_ -> e -- default case of f, or f has no computation rule
|
||||
where
|
||||
appf = EApp (EFun f)
|
||||
|
||||
@@ -172,11 +172,11 @@ value env t0 =
|
||||
ImplArg t -> (VImplArg.) # value env t
|
||||
Table p res -> liftM2 VTblType # value env p <# value env res
|
||||
RecType rs -> do lovs <- mapPairsM (value env) rs
|
||||
return $ \vs->VRecType $ mapSnd ($vs) lovs
|
||||
return $ \vs->VRecType $ mapSnd ($ vs) lovs
|
||||
t@(ExtR t1 t2) -> ((extR t.)# both id) # both (value env) (t1,t2)
|
||||
FV ts -> ((vfv .) # sequence) # mapM (value env) ts
|
||||
R as -> do lovs <- mapPairsM (value env.snd) as
|
||||
return $ \ vs->VRec $ mapSnd ($vs) lovs
|
||||
return $ \ vs->VRec $ mapSnd ($ vs) lovs
|
||||
T i cs -> valueTable env i cs
|
||||
V ty ts -> do pvs <- paramValues env ty
|
||||
((VV ty pvs .) . sequence) # mapM (value env) ts
|
||||
@@ -376,10 +376,10 @@ valueTable env i cs =
|
||||
where
|
||||
dynamic cs' ty _ = cases cs' # value env ty
|
||||
|
||||
cases cs' vty vs = err keep ($vs) (convertv cs' (vty vs))
|
||||
cases cs' vty vs = err keep ($ vs) (convertv cs' (vty vs))
|
||||
where
|
||||
keep msg = --trace (msg++"\n"++render (ppTerm Unqualified 0 (T i cs))) $
|
||||
VT wild (vty vs) (mapSnd ($vs) cs')
|
||||
VT wild (vty vs) (mapSnd ($ vs) cs')
|
||||
|
||||
wild = case i of TWild _ -> True; _ -> False
|
||||
|
||||
@@ -392,7 +392,7 @@ valueTable env i cs =
|
||||
convert' cs' ((pty,vs),pvs) =
|
||||
do sts <- mapM (matchPattern cs') vs
|
||||
return $ \ vs -> VV pty pvs $ map (err bug id . valueMatch env)
|
||||
(mapFst ($vs) sts)
|
||||
(mapFst ($ vs) sts)
|
||||
|
||||
valueCase (p,t) = do p' <- measurePatt # inlinePattMacro p
|
||||
pvs <- linPattVars p'
|
||||
@@ -430,19 +430,19 @@ apply' :: CompleteEnv -> Term -> [OpenValue] -> Err OpenValue
|
||||
apply' env t [] = value env t
|
||||
apply' env t vs =
|
||||
case t of
|
||||
QC x -> return $ \ svs -> VCApp x (map ($svs) vs)
|
||||
QC x -> return $ \ svs -> VCApp x (map ($ svs) vs)
|
||||
{-
|
||||
Q x@(m,f) | m==cPredef -> return $
|
||||
let constr = --trace ("predef "++show x) .
|
||||
VApp x
|
||||
in \ svs -> maybe constr id (Map.lookup f predefs)
|
||||
$ map ($svs) vs
|
||||
$ map ($ svs) vs
|
||||
| otherwise -> do r <- resource env x
|
||||
return $ \ svs -> vapply (gloc env) r (map ($svs) vs)
|
||||
return $ \ svs -> vapply (gloc env) r (map ($ svs) vs)
|
||||
-}
|
||||
App t1 t2 -> apply' env t1 . (:vs) =<< value env t2
|
||||
_ -> do fv <- value env t
|
||||
return $ \ svs -> vapply (gloc env) (fv svs) (map ($svs) vs)
|
||||
return $ \ svs -> vapply (gloc env) (fv svs) (map ($ svs) vs)
|
||||
|
||||
vapply :: GLocation -> Value -> [Value] -> Value
|
||||
vapply loc v [] = v
|
||||
|
||||
@@ -201,11 +201,11 @@ instance Fail.MonadFail CnvMonad where
|
||||
fail = bug
|
||||
|
||||
instance Applicative CnvMonad where
|
||||
pure = return
|
||||
pure a = CM (\gr c s -> c a s)
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad CnvMonad where
|
||||
return a = CM (\gr c s -> c a s)
|
||||
return = pure
|
||||
CM m >>= k = CM (\gr c s -> m gr (\a s -> unCM (k a) gr c s) s)
|
||||
|
||||
instance MonadState ([ProtoFCat],[Symbol]) CnvMonad where
|
||||
|
||||
@@ -42,11 +42,12 @@ getSourceModule opts file0 =
|
||||
raw <- liftIO $ keepTemp tmp
|
||||
--ePutStrLn $ "1 "++file0
|
||||
(optCoding,parsed) <- parseSource opts pModDef raw
|
||||
let indentLines = unlines . map (" "++) . lines
|
||||
case parsed of
|
||||
Left (Pn l c,msg) -> do file <- liftIO $ writeTemp tmp
|
||||
cwd <- getCurrentDirectory
|
||||
let location = makeRelative cwd file++":"++show l++":"++show c
|
||||
raise (location++":\n "++msg)
|
||||
raise (location++":\n" ++ indentLines msg)
|
||||
Right (i,mi0) ->
|
||||
do liftIO $ removeTemp tmp
|
||||
let mi =mi0 {mflags=mflags mi0 `addOptions` opts, msrc=file0}
|
||||
|
||||
@@ -644,7 +644,7 @@ data TcResult a
|
||||
newtype TcM a = TcM {unTcM :: MetaStore -> [Message] -> TcResult a}
|
||||
|
||||
instance Monad TcM where
|
||||
return x = TcM (\ms msgs -> TcOk x ms msgs)
|
||||
return = pure
|
||||
f >>= g = TcM (\ms msgs -> case unTcM f ms msgs of
|
||||
TcOk x ms msgs -> unTcM (g x) ms msgs
|
||||
TcFail msgs -> TcFail msgs)
|
||||
@@ -659,7 +659,7 @@ instance Fail.MonadFail TcM where
|
||||
|
||||
|
||||
instance Applicative TcM where
|
||||
pure = return
|
||||
pure x = TcM (\ms msgs -> TcOk x ms msgs)
|
||||
(<*>) = ap
|
||||
|
||||
instance Functor TcM where
|
||||
|
||||
@@ -61,11 +61,11 @@ parallelBatchCompile jobs opts rootfiles0 =
|
||||
|
||||
usesPresent (_,paths) = take 1 libs==["present"]
|
||||
where
|
||||
libs = [p|path<-paths,
|
||||
let (d,p0) = splitAt n path
|
||||
p = dropSlash p0,
|
||||
d==lib_dir,p `elem` all_modes]
|
||||
n = length lib_dir
|
||||
libs = [p | path<-paths,
|
||||
let (d,p0) = splitAt n path
|
||||
p = dropSlash p0,
|
||||
d==lib_dir, p `elem` all_modes]
|
||||
n = length lib_dir
|
||||
|
||||
all_modes = ["alltenses","present"]
|
||||
|
||||
@@ -175,7 +175,7 @@ batchCompile1 lib_dir (opts,filepaths) =
|
||||
" from being compiled."
|
||||
else return (maximum ts,(cnc,gr))
|
||||
|
||||
splitEither es = ([x|Left x<-es],[y|Right y<-es])
|
||||
splitEither es = ([x | Left x<-es], [y | Right y<-es])
|
||||
|
||||
canonical path = liftIO $ D.canonicalizePath path `catch` const (return path)
|
||||
|
||||
@@ -238,12 +238,12 @@ runCO (CO m) = do (o,x) <- m
|
||||
instance Functor m => Functor (CollectOutput m) where
|
||||
fmap f (CO m) = CO (fmap (fmap f) m)
|
||||
|
||||
instance (Functor m,Monad m) => Applicative (CollectOutput m) where
|
||||
pure = return
|
||||
instance (Functor m,Monad m) => Applicative (CollectOutput m) where
|
||||
pure x = CO (return (return (),x))
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad m => Monad (CollectOutput m) where
|
||||
return x = CO (return (return (),x))
|
||||
return = pure
|
||||
CO m >>= f = CO $ do (o1,x) <- m
|
||||
let CO m2 = f x
|
||||
(o2,y) <- m2
|
||||
|
||||
@@ -64,11 +64,11 @@ finalStates :: BacktrackM s () -> s -> [s]
|
||||
finalStates bm = map fst . runBM bm
|
||||
|
||||
instance Applicative (BacktrackM s) where
|
||||
pure = return
|
||||
pure a = BM (\c s b -> c a s b)
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad (BacktrackM s) where
|
||||
return a = BM (\c s b -> c a s b)
|
||||
return = pure
|
||||
BM m >>= k = BM (\c s b -> m (\a s b -> unBM (k a) c s b) s b)
|
||||
where unBM (BM m) = m
|
||||
|
||||
|
||||
@@ -34,7 +34,7 @@ fromErr :: a -> Err a -> a
|
||||
fromErr a = err (const a) id
|
||||
|
||||
instance Monad Err where
|
||||
return = Ok
|
||||
return = pure
|
||||
Ok a >>= f = f a
|
||||
Bad s >>= f = Bad s
|
||||
|
||||
@@ -54,7 +54,7 @@ instance Functor Err where
|
||||
fmap f (Bad s) = Bad s
|
||||
|
||||
instance Applicative Err where
|
||||
pure = return
|
||||
pure = Ok
|
||||
(<*>) = ap
|
||||
|
||||
-- | added by KJ
|
||||
|
||||
@@ -78,6 +78,7 @@ import PGF.Internal (FId, FunId, SeqId, LIndex, Sequence, BindType(..))
|
||||
import Data.Array.IArray(Array)
|
||||
import Data.Array.Unboxed(UArray)
|
||||
import qualified Data.Map as Map
|
||||
import qualified Data.Set as Set
|
||||
import GF.Text.Pretty
|
||||
|
||||
|
||||
@@ -125,10 +126,20 @@ extends :: ModuleInfo -> [ModuleName]
|
||||
extends = map fst . mextend
|
||||
|
||||
isInherited :: MInclude -> Ident -> Bool
|
||||
isInherited c i = case c of
|
||||
MIAll -> True
|
||||
MIOnly is -> elem i is
|
||||
MIExcept is -> notElem i is
|
||||
isInherited c =
|
||||
case c of
|
||||
MIAll -> const True
|
||||
MIOnly is -> elemOrd is
|
||||
MIExcept is -> not . elemOrd is
|
||||
|
||||
-- | Faster version of `elem`, using a `Set`.
|
||||
-- Make sure you give this the first argument _outside_ of the inner loop
|
||||
--
|
||||
-- Example:
|
||||
-- > myIntersection xs ys = filter (elemOrd xs) ys
|
||||
elemOrd :: Ord a => [a] -> a -> Bool
|
||||
elemOrd list = (`Set.member` set)
|
||||
where set = Set.fromList list
|
||||
|
||||
inheritAll :: ModuleName -> (ModuleName,MInclude)
|
||||
inheritAll i = (i,MIAll)
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
module GF.Grammar.Lexer
|
||||
( Token(..), Posn(..)
|
||||
, P, runP, runPartial, token, lexer, getPosn, failLoc
|
||||
, isReservedWord
|
||||
, isReservedWord, invMap
|
||||
) where
|
||||
|
||||
import Control.Applicative
|
||||
@@ -134,7 +134,7 @@ data Token
|
||||
| T_Double Double -- double precision float literals
|
||||
| T_Ident Ident
|
||||
| T_EOF
|
||||
-- deriving Show -- debug
|
||||
deriving (Eq, Ord, Show) -- debug
|
||||
|
||||
res = eitherResIdent
|
||||
eitherResIdent :: (Ident -> Token) -> Ident -> Token
|
||||
@@ -224,6 +224,13 @@ resWords = Map.fromList
|
||||
]
|
||||
where b s t = (identS s, t)
|
||||
|
||||
invMap :: Map.Map Token String
|
||||
invMap = res
|
||||
where
|
||||
lst = Map.toList resWords
|
||||
flp = map (\(k,v) -> (v,showIdent k)) lst
|
||||
res = Map.fromList flp
|
||||
|
||||
unescapeInitTail :: String -> String
|
||||
unescapeInitTail = unesc . tail where
|
||||
unesc s = case s of
|
||||
@@ -276,11 +283,11 @@ instance Functor P where
|
||||
fmap = liftA
|
||||
|
||||
instance Applicative P where
|
||||
pure = return
|
||||
pure a = a `seq` (P $ \s -> POk s a)
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad P where
|
||||
return a = a `seq` (P $ \s -> POk s a)
|
||||
return = pure
|
||||
(P m) >>= k = P $ \ s -> case m s of
|
||||
POk s a -> unP (k a) s
|
||||
PFailed posn err -> PFailed posn err
|
||||
|
||||
@@ -37,6 +37,9 @@ import PGF(mkCId)
|
||||
%name pBNFCRules ListCFRule
|
||||
%name pEBNFRules ListEBNFRule
|
||||
|
||||
%errorhandlertype explist
|
||||
%error { happyError }
|
||||
|
||||
-- no lexer declaration
|
||||
%monad { P } { >>= } { return }
|
||||
%lexer { lexer } { T_EOF }
|
||||
@@ -430,6 +433,7 @@ Exp3
|
||||
RecType xs -> RecType (xs ++ [(tupleLabel (length xs+1),$3)])
|
||||
t -> RecType [(tupleLabel 1,$1), (tupleLabel 2,$3)] }
|
||||
| Exp3 '**' Exp4 { ExtR $1 $3 }
|
||||
| Exp3 '**' '{' ListCase '}' { let v = identS "$vvv" in T TRaw ($4 ++ [(PV v, S $1 (Vr v))]) }
|
||||
| Exp4 { $1 }
|
||||
|
||||
Exp4 :: { Term }
|
||||
@@ -701,8 +705,18 @@ Posn
|
||||
|
||||
{
|
||||
|
||||
happyError :: P a
|
||||
happyError = fail "syntax error"
|
||||
happyError :: (Token, [String]) -> P a
|
||||
happyError (t,strs) = fail $
|
||||
"Syntax error:\n Unexpected " ++ showToken t ++ ".\n Expected one of:\n"
|
||||
++ unlines (map ((" - "++).cleanupToken) strs)
|
||||
|
||||
where
|
||||
cleanupToken "Ident" = "an identifier"
|
||||
cleanupToken x = x
|
||||
showToken (T_Ident i) = "identifier '" ++ showIdent i ++ "'"
|
||||
showToken t = case Map.lookup t invMap of
|
||||
Nothing -> show t
|
||||
Just s -> "token '" ++ s ++"'"
|
||||
|
||||
mkListId,mkConsId,mkBaseId :: Ident -> Ident
|
||||
mkListId = prefixIdent "List"
|
||||
|
||||
@@ -1,13 +1,34 @@
|
||||
{-# LANGUAGE CPP #-}
|
||||
{-# LANGUAGE TemplateHaskell #-}
|
||||
|
||||
module GF.Infra.BuildInfo where
|
||||
import System.Info
|
||||
import Data.Version(showVersion)
|
||||
|
||||
import Language.Haskell.TH.Syntax
|
||||
import Control.Monad.IO.Class
|
||||
import Control.Exception
|
||||
import Data.Time hiding (buildTime)
|
||||
import System.Process
|
||||
|
||||
-- Use Template Haskell to get compile time
|
||||
buildTime :: String
|
||||
buildTime = $(do
|
||||
timeZone <- liftIO getCurrentTimeZone
|
||||
time <- liftIO $ utcToLocalTime timeZone <$> getCurrentTime
|
||||
return $ LitE $ StringL $ formatTime defaultTimeLocale "%F %T" time )
|
||||
|
||||
-- Use Template Haskell to get current Git information
|
||||
gitInfo :: String
|
||||
gitInfo = $(do
|
||||
info <- liftIO $ try $ readProcess "git" ["log", "--format=commit %h tag %(describe:tags=true)", "-1"] "" :: Q (Either SomeException String)
|
||||
return $ LitE $ StringL $ either (\_ -> "unavailable") id info )
|
||||
|
||||
{-# NOINLINE buildInfo #-}
|
||||
buildInfo =
|
||||
"Built on "++os++"/"++arch
|
||||
++" with "++compilerName++"-"++showVersion compilerVersion
|
||||
++", flags:"
|
||||
++" with "++compilerName++"-"++showVersion compilerVersion ++ " at " ++ buildTime ++ "\nGit info: " ++ gitInfo
|
||||
++"\nFlags:"
|
||||
#ifdef USE_INTERRUPT
|
||||
++" interrupt"
|
||||
#endif
|
||||
|
||||
@@ -48,7 +48,7 @@ newtype Check a
|
||||
instance Functor Check where fmap = liftM
|
||||
|
||||
instance Monad Check where
|
||||
return x = Check $ \{-ctxt-} ws -> (ws,Success x)
|
||||
return = pure
|
||||
f >>= g = Check $ \{-ctxt-} ws ->
|
||||
case unCheck f {-ctxt-} ws of
|
||||
(ws,Success x) -> unCheck (g x) {-ctxt-} ws
|
||||
@@ -58,7 +58,7 @@ instance Fail.MonadFail Check where
|
||||
fail = raise
|
||||
|
||||
instance Applicative Check where
|
||||
pure = return
|
||||
pure x = Check $ \{-ctxt-} ws -> (ws,Success x)
|
||||
(<*>) = ap
|
||||
|
||||
instance ErrorMonad Check where
|
||||
|
||||
@@ -52,11 +52,11 @@ newtype SIO a = SIO {unS::PutStr->IO a}
|
||||
instance Functor SIO where fmap = liftM
|
||||
|
||||
instance Applicative SIO where
|
||||
pure = return
|
||||
pure x = SIO (const (pure x))
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad SIO where
|
||||
return x = SIO (const (return x))
|
||||
return = pure
|
||||
SIO m1 >>= xm2 = SIO $ \ h -> m1 h >>= \ x -> unS (xm2 x) h
|
||||
|
||||
instance Fail.MonadFail SIO where
|
||||
|
||||
@@ -32,14 +32,17 @@ import qualified Text.ParserCombinators.ReadP as RP
|
||||
import System.Directory({-getCurrentDirectory,-}getAppUserDataDirectory)
|
||||
import Control.Exception(SomeException,fromException,evaluate,try)
|
||||
import Control.Monad.State hiding (void)
|
||||
import Control.Monad (join, when, (<=<))
|
||||
import qualified GF.System.Signal as IO(runInterruptibly)
|
||||
#ifdef SERVER_MODE
|
||||
import GF.Server(server)
|
||||
#endif
|
||||
|
||||
import GF.Command.Messages(welcome)
|
||||
-- Provides an orphan instance of MonadFail for StateT in ghc versions < 8
|
||||
#if !(MIN_VERSION_base(4,9,0))
|
||||
-- Needed to make it compile on GHC < 8
|
||||
import Control.Monad.Trans.Instances ()
|
||||
#endif
|
||||
|
||||
-- | Run the GF Shell in quiet mode (@gf -run@).
|
||||
mainRunGFI :: Options -> [FilePath] -> IO ()
|
||||
|
||||
@@ -12,7 +12,7 @@ import GF.Command.Abstract
|
||||
import GF.Command.Parse(readCommandLine,pCommand)
|
||||
import GF.Data.Operations (Err(..))
|
||||
import GF.Data.Utilities(whenM,repeatM)
|
||||
|
||||
import Control.Monad (join, when, (<=<))
|
||||
import GF.Infra.UseIO(ioErrorText,putStrLnE)
|
||||
import GF.Infra.SIO
|
||||
import GF.Infra.Option
|
||||
|
||||
@@ -48,7 +48,10 @@ getOptions = do
|
||||
mainOpts :: Options -> [FilePath] -> IO ()
|
||||
mainOpts opts files =
|
||||
case flag optMode opts of
|
||||
ModeVersion -> putStrLn $ "Grammatical Framework (GF) version " ++ showVersion version ++ "\n" ++ buildInfo
|
||||
ModeVersion -> do datadir <- getDataDir
|
||||
putStrLn $ "Grammatical Framework (GF) version " ++ showVersion version ++ "\n" ++
|
||||
buildInfo ++ "\n" ++
|
||||
"Shared folder: " ++ datadir
|
||||
ModeHelp -> putStrLn helpMessage
|
||||
ModeServer port -> GFI1.mainServerGFI opts port files
|
||||
ModeCompiler -> mainGFC opts files
|
||||
|
||||
@@ -44,7 +44,7 @@ $ make install
|
||||
For Windows users
|
||||
-----------------
|
||||
|
||||
- Install MinGW: http://www.mingw.org/. From the installer you need
|
||||
- Install MinGW: http://www.mingw-w64.org/. From the installer you need
|
||||
to select at least the following packages:
|
||||
- Mingw-developer-toolkit
|
||||
- Mingw-base
|
||||
|
||||
@@ -30,6 +30,7 @@ AM_PROG_CC_C_O
|
||||
-Wall\
|
||||
-Wextra\
|
||||
-Wno-missing-field-initializers\
|
||||
-fpermissive\
|
||||
-Wno-unused-parameter\
|
||||
-Wno-unused-value"
|
||||
fi]
|
||||
@@ -43,8 +44,10 @@ case "$target_cpu" in
|
||||
[Define if lightning is targeting the sparc architecture]) ;;
|
||||
powerpc) cpu=ppc; AC_DEFINE(LIGHTNING_PPC, 1,
|
||||
[Define if lightning is targeting the powerpc architecture]) ;;
|
||||
arm*) cpu=arm; AC_DEFINE(LIGHTNING_ARM, 1,
|
||||
arm*) cpu=arm; AC_DEFINE(LIGHTNING_ARM, 1,
|
||||
[Define if lightning is targeting the arm architecture]) ;;
|
||||
aarch64) cpu=aarch64; AC_DEFINE(LIGHTNING_AARCH64, 1,
|
||||
[Define if lightning is targeting the aarch64 architecture]) ;;
|
||||
*) AC_MSG_ERROR([cpu $target_cpu not supported]) ;;
|
||||
esac
|
||||
|
||||
|
||||
@@ -18,24 +18,12 @@ gu_exn_is_raised(GuExn* err) {
|
||||
return err && (err->state == GU_EXN_RAISED);
|
||||
}
|
||||
|
||||
GU_API_DECL void
|
||||
gu_exn_clear(GuExn* err) {
|
||||
err->caught = NULL;
|
||||
err->state = GU_EXN_OK;
|
||||
}
|
||||
|
||||
GU_API bool
|
||||
gu_exn_caught_(GuExn* err, const char* type)
|
||||
{
|
||||
return (err->caught && strcmp(err->caught, type) == 0);
|
||||
}
|
||||
|
||||
GU_API_DECL void*
|
||||
gu_exn_caught_data(GuExn* err)
|
||||
{
|
||||
return err->data.data;
|
||||
}
|
||||
|
||||
GU_API void
|
||||
gu_exn_block(GuExn* err)
|
||||
{
|
||||
|
||||
@@ -71,13 +71,11 @@ gu_new_exn(GuPool* pool);
|
||||
GU_API_DECL bool
|
||||
gu_exn_is_raised(GuExn* err);
|
||||
|
||||
// static inline void
|
||||
// gu_exn_clear(GuExn* err) {
|
||||
// err->caught = NULL;
|
||||
// err->state = GU_EXN_OK;
|
||||
// }
|
||||
GU_API_DECL void
|
||||
gu_exn_clear(GuExn* err);
|
||||
static inline void
|
||||
gu_exn_clear(GuExn* err) {
|
||||
err->caught = NULL;
|
||||
err->state = GU_EXN_OK;
|
||||
}
|
||||
|
||||
#define gu_exn_caught(err, type) \
|
||||
(err->caught && strcmp(err->caught, #type) == 0)
|
||||
@@ -85,13 +83,11 @@ gu_exn_clear(GuExn* err);
|
||||
GU_API_DECL bool
|
||||
gu_exn_caught_(GuExn* err, const char* type);
|
||||
|
||||
// static inline const void*
|
||||
// gu_exn_caught_data(GuExn* err)
|
||||
// {
|
||||
// return err->data.data;
|
||||
// }
|
||||
GU_API_DECL void*
|
||||
gu_exn_caught_data(GuExn* err);
|
||||
static inline const void*
|
||||
gu_exn_caught_data(GuExn* err)
|
||||
{
|
||||
return err->data.data;
|
||||
}
|
||||
|
||||
/// Temporarily block a raised exception.
|
||||
GU_API_DECL void
|
||||
|
||||
@@ -12,17 +12,17 @@ typedef void (*GuFn2)(GuFn* clo, void* arg1, void* arg2);
|
||||
|
||||
static inline void
|
||||
gu_apply0(GuFn* fn) {
|
||||
(*fn)(fn);
|
||||
((GuFn0)(*fn))(fn);
|
||||
}
|
||||
|
||||
static inline void
|
||||
gu_apply1(GuFn* fn, void* arg1) {
|
||||
(*fn)(fn, arg1);
|
||||
((GuFn1)(*fn))(fn, arg1);
|
||||
}
|
||||
|
||||
static inline void
|
||||
gu_apply2(GuFn* fn, void* arg1, void* arg2) {
|
||||
(*fn)(fn, arg1, arg2);
|
||||
((GuFn2)(*fn))(fn, arg1, arg2);
|
||||
}
|
||||
|
||||
#define gu_apply(fn_, ...) \
|
||||
|
||||
@@ -4,11 +4,14 @@
|
||||
#include <pgf/data.h>
|
||||
#include <pgf/reasoner.h>
|
||||
#include <pgf/reader.h>
|
||||
|
||||
#if !defined(__aarch64__)
|
||||
#include "lightning.h"
|
||||
#endif
|
||||
|
||||
//#define PGF_JIT_DEBUG
|
||||
|
||||
#ifdef EMSCRIPTEN
|
||||
#if defined(EMSCRIPTEN) || defined(__aarch64__)
|
||||
|
||||
PGF_INTERNAL PgfJitState*
|
||||
pgf_new_jit(PgfReader* rdr)
|
||||
@@ -19,23 +22,23 @@ pgf_new_jit(PgfReader* rdr)
|
||||
PGF_INTERNAL PgfEvalGates*
|
||||
pgf_jit_gates(PgfReader* rdr)
|
||||
{
|
||||
return NULL;
|
||||
PgfEvalGates* gates = gu_new(PgfEvalGates, rdr->opool);
|
||||
memset(gates, 0, sizeof(*gates));
|
||||
return gates;
|
||||
}
|
||||
|
||||
PGF_INTERNAL void
|
||||
pgf_jit_predicate(PgfReader* rdr, PgfAbstr* abstr,
|
||||
PgfAbsCat* abscat)
|
||||
{
|
||||
size_t n_funs = pgf_read_len(rdr);
|
||||
size_t n_funs = pgf_read_len(rdr);
|
||||
gu_return_on_exn(rdr->err, );
|
||||
|
||||
for (size_t i = 0; i < n_funs; i++) {
|
||||
gu_in_f64be(rdr->in, rdr->err); // ignore
|
||||
gu_return_on_exn(rdr->err, );
|
||||
|
||||
PgfCId name = pgf_read_cid(rdr, rdr->tmp_pool);
|
||||
gu_return_on_exn(rdr->err, );
|
||||
}
|
||||
for (size_t i = 0; i < n_funs; i++) {
|
||||
gu_in_f64be(rdr->in, rdr->err); // ignore
|
||||
gu_return_on_exn(rdr->err,);
|
||||
pgf_read_cid(rdr, rdr->tmp_pool);
|
||||
}
|
||||
}
|
||||
|
||||
PGF_INTERNAL void
|
||||
|
||||
1
src/runtime/c/pgf/lightning/aarch64/asm.h
Normal file
1
src/runtime/c/pgf/lightning/aarch64/asm.h
Normal file
@@ -0,0 +1 @@
|
||||
// DUMMY
|
||||
1
src/runtime/c/pgf/lightning/aarch64/core.h
Normal file
1
src/runtime/c/pgf/lightning/aarch64/core.h
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
1
src/runtime/c/pgf/lightning/aarch64/fp.h
Normal file
1
src/runtime/c/pgf/lightning/aarch64/fp.h
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
1
src/runtime/c/pgf/lightning/aarch64/funcs.h
Normal file
1
src/runtime/c/pgf/lightning/aarch64/funcs.h
Normal file
@@ -0,0 +1 @@
|
||||
// DUMMY
|
||||
@@ -1326,7 +1326,7 @@ pgf_read_concretes(PgfReader* rdr, PgfAbstr* abstr, bool with_content)
|
||||
PGF_INTERNAL PgfPGF*
|
||||
pgf_read_pgf(PgfReader* rdr) {
|
||||
PgfPGF* pgf = gu_new(PgfPGF, rdr->opool);
|
||||
|
||||
|
||||
pgf->major_version = gu_in_u16be(rdr->in, rdr->err);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
@@ -1335,7 +1335,7 @@ pgf_read_pgf(PgfReader* rdr) {
|
||||
|
||||
pgf->gflags = pgf_read_flags(rdr);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
|
||||
pgf_read_abstract(rdr, &pgf->abstract);
|
||||
gu_return_on_exn(rdr->err, NULL);
|
||||
|
||||
|
||||
3
src/runtime/haskell-bind/Setup.hs
Normal file
3
src/runtime/haskell-bind/Setup.hs
Normal file
@@ -0,0 +1,3 @@
|
||||
import Distribution.Simple
|
||||
|
||||
main = defaultMain
|
||||
@@ -15,7 +15,7 @@ homepage: https://www.grammaticalframework.org/
|
||||
bug-reports: https://github.com/GrammaticalFramework/gf-core/issues
|
||||
author: Krasimir Angelov
|
||||
extra-source-files: CHANGELOG.md, README.md
|
||||
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.10.4
|
||||
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.10.4, GHC=9.6.6
|
||||
|
||||
library
|
||||
exposed-modules:
|
||||
@@ -26,7 +26,7 @@ library
|
||||
PGF2.Expr,
|
||||
PGF2.Type
|
||||
build-depends:
|
||||
base >= 4.9.1 && < 4.16,
|
||||
base >= 4.9.1 && < 4.22,
|
||||
containers >= 0.5.7 && < 0.7,
|
||||
pretty >= 1.1.3 && < 1.2
|
||||
default-language: Haskell2010
|
||||
|
||||
@@ -74,10 +74,16 @@ import qualified Data.ByteString.Internal as S
|
||||
#endif
|
||||
|
||||
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
|
||||
import GHC.Base(Int(..),uncheckedShiftRL# )
|
||||
import GHC.Base(Int(..),uncheckedShiftRL#,)
|
||||
import GHC.Word (Word32(..),Word16(..),Word64(..))
|
||||
|
||||
#if WORD_SIZE_IN_BITS < 64 && __GLASGOW_HASKELL__ >= 608
|
||||
#if MIN_VERSION_base(4,16,0)
|
||||
import GHC.Exts (wordToWord16#, word16ToWord#, wordToWord32#, word32ToWord#)
|
||||
#endif
|
||||
#if WORD_SIZE_IN_BITS < 64 && __GLASGOW_HASKELL__ >= 608
|
||||
import GHC.Word (uncheckedShiftRL64#)
|
||||
#endif
|
||||
#if __GLASGOW_HASKELL__ >= 900
|
||||
import GHC.Word (uncheckedShiftRL64#)
|
||||
#endif
|
||||
#endif
|
||||
@@ -108,7 +114,7 @@ instance Semigroup Builder where
|
||||
instance Monoid Builder where
|
||||
mempty = empty
|
||||
{-# INLINE mempty #-}
|
||||
mappend = append
|
||||
mappend = (<>)
|
||||
{-# INLINE mappend #-}
|
||||
|
||||
------------------------------------------------------------------------
|
||||
@@ -411,8 +417,14 @@ shiftr_w32 :: Word32 -> Int -> Word32
|
||||
shiftr_w64 :: Word64 -> Int -> Word64
|
||||
|
||||
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
|
||||
#if MIN_VERSION_base(4,16,0)
|
||||
shiftr_w16 (W16# w) (I# i) = W16# (wordToWord16# ((word16ToWord# w) `uncheckedShiftRL#` i))
|
||||
shiftr_w32 (W32# w) (I# i) = W32# (wordToWord32# ((word32ToWord# w) `uncheckedShiftRL#` i))
|
||||
#else
|
||||
shiftr_w16 (W16# w) (I# i) = W16# (w `uncheckedShiftRL#` i)
|
||||
shiftr_w32 (W32# w) (I# i) = W32# (w `uncheckedShiftRL#` i)
|
||||
#endif
|
||||
|
||||
|
||||
#if WORD_SIZE_IN_BITS < 64
|
||||
shiftr_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftRL64#` i)
|
||||
@@ -424,7 +436,11 @@ foreign import ccall unsafe "stg_uncheckedShiftRL64"
|
||||
#endif
|
||||
|
||||
#else
|
||||
#if __GLASGOW_HASKELL__ <= 810
|
||||
shiftr_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftRL#` i)
|
||||
#else
|
||||
shiftr_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftRL64#` i)
|
||||
#endif
|
||||
#endif
|
||||
|
||||
#else
|
||||
|
||||
@@ -101,6 +101,12 @@ import Data.STRef
|
||||
import GHC.Base
|
||||
import GHC.Word
|
||||
--import GHC.Int
|
||||
#if MIN_VERSION_base(4,16,0)
|
||||
import GHC.Exts (wordToWord16#, word16ToWord#, wordToWord32#, word32ToWord#)
|
||||
#endif
|
||||
#if __GLASGOW_HASKELL__ >= 900
|
||||
import GHC.Word (uncheckedShiftL64#)
|
||||
#endif
|
||||
#endif
|
||||
|
||||
-- Control.Monad.Fail import will become redundant in GHC 8.8+
|
||||
@@ -121,11 +127,11 @@ instance Functor Get where
|
||||
{-# INLINE fmap #-}
|
||||
|
||||
instance Applicative Get where
|
||||
pure = return
|
||||
pure a = Get (\s -> (a, s))
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad Get where
|
||||
return a = Get (\s -> (a, s))
|
||||
return = pure
|
||||
{-# INLINE return #-}
|
||||
|
||||
m >>= k = Get (\s -> case unGet m s of
|
||||
@@ -532,8 +538,13 @@ shiftl_w32 :: Word32 -> Int -> Word32
|
||||
shiftl_w64 :: Word64 -> Int -> Word64
|
||||
|
||||
#if defined(__GLASGOW_HASKELL__) && !defined(__HADDOCK__)
|
||||
#if MIN_VERSION_base(4,16,0)
|
||||
shiftl_w16 (W16# w) (I# i) = W16# (wordToWord16# ((word16ToWord# w) `uncheckedShiftL#` i))
|
||||
shiftl_w32 (W32# w) (I# i) = W32# (wordToWord32# ((word32ToWord# w) `uncheckedShiftL#` i))
|
||||
#else
|
||||
shiftl_w16 (W16# w) (I# i) = W16# (w `uncheckedShiftL#` i)
|
||||
shiftl_w32 (W32# w) (I# i) = W32# (w `uncheckedShiftL#` i)
|
||||
#endif
|
||||
|
||||
#if WORD_SIZE_IN_BITS < 64
|
||||
shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL64#` i)
|
||||
@@ -545,7 +556,12 @@ foreign import ccall unsafe "stg_uncheckedShiftL64"
|
||||
#endif
|
||||
|
||||
#else
|
||||
#if __GLASGOW_HASKELL__ <= 810
|
||||
shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL#` i)
|
||||
#else
|
||||
shiftl_w64 (W64# w) (I# i) = W64# (w `uncheckedShiftL64#` i)
|
||||
#endif
|
||||
|
||||
#endif
|
||||
|
||||
#else
|
||||
|
||||
@@ -77,15 +77,20 @@ instance Functor PutM where
|
||||
{-# INLINE fmap #-}
|
||||
|
||||
instance Applicative PutM where
|
||||
pure = return
|
||||
pure a = Put $ PairS a mempty
|
||||
m <*> k = Put $
|
||||
let PairS f w = unPut m
|
||||
PairS x w' = unPut k
|
||||
in PairS (f x) (w `mappend` w')
|
||||
m *> k = Put $
|
||||
let PairS _ w = unPut m
|
||||
PairS b w' = unPut k
|
||||
in PairS b (w `mappend` w')
|
||||
{-# INLINE (*>) #-}
|
||||
|
||||
-- Standard Writer monad, with aggressive inlining
|
||||
instance Monad PutM where
|
||||
return a = Put $ PairS a mempty
|
||||
return = pure
|
||||
{-# INLINE return #-}
|
||||
|
||||
m >>= k = Put $
|
||||
@@ -94,10 +99,7 @@ instance Monad PutM where
|
||||
in PairS b (w `mappend` w')
|
||||
{-# INLINE (>>=) #-}
|
||||
|
||||
m >> k = Put $
|
||||
let PairS _ w = unPut m
|
||||
PairS b w' = unPut k
|
||||
in PairS b (w `mappend` w')
|
||||
(>>) = (*>)
|
||||
{-# INLINE (>>) #-}
|
||||
|
||||
tell :: Builder -> Put
|
||||
|
||||
@@ -31,7 +31,7 @@ module PGF(
|
||||
languages, abstractName, languageCode,
|
||||
|
||||
-- * Types
|
||||
Type, Hypo,
|
||||
Type, Hypo, BindType(..),
|
||||
showType, readType,
|
||||
mkType, mkHypo, mkDepHypo, mkImplHypo,
|
||||
unType,
|
||||
|
||||
@@ -17,7 +17,8 @@ module PGF.Expr(Tree, BindType(..), Expr(..), Literal(..), Patt(..), Equation(..
|
||||
MetaId,
|
||||
|
||||
-- helpers
|
||||
pMeta,pArg,pLit,freshName,ppMeta,ppLit,ppParens
|
||||
pMeta,pArg,pLit,freshName,ppMeta,ppLit,ppParens,
|
||||
freshBoundVars
|
||||
) where
|
||||
|
||||
import PGF.CId
|
||||
@@ -235,10 +236,11 @@ pLit = liftM LStr (RP.readS_to_P reads)
|
||||
|
||||
ppExpr :: Int -> [CId] -> Expr -> PP.Doc
|
||||
ppExpr d scope (EAbs b x e) = let (bs,xs,e1) = getVars [] [] (EAbs b x e)
|
||||
xs' = freshBoundVars scope xs
|
||||
in ppParens (d > 1) (PP.char '\\' PP.<>
|
||||
PP.hsep (PP.punctuate PP.comma (reverse (List.zipWith ppBind bs xs))) PP.<+>
|
||||
PP.hsep (PP.punctuate PP.comma (reverse (List.zipWith ppBind bs xs'))) PP.<+>
|
||||
PP.text "->" PP.<+>
|
||||
ppExpr 1 (xs++scope) e1)
|
||||
ppExpr 1 (xs' ++ scope) e1)
|
||||
where
|
||||
getVars bs xs (EAbs b x e) = getVars (b:bs) ((freshName x xs):xs) e
|
||||
getVars bs xs e = (bs,xs,e)
|
||||
@@ -289,6 +291,15 @@ freshName x xs0 = loop 1 x
|
||||
| elem y xs = loop (i+1) (mkCId (show x++show i))
|
||||
| otherwise = y
|
||||
|
||||
-- refresh new vars xs in scope if needed. AR 2024-03-01
|
||||
freshBoundVars :: [CId] -> [CId] -> [CId]
|
||||
freshBoundVars scope xs = foldr fresh [] xs
|
||||
where
|
||||
fresh x xs' = mkCId (freshName (showCId x) xs') : xs'
|
||||
freshName s xs' =
|
||||
if elem (mkCId s) (xs' ++ scope)
|
||||
then freshName (s ++ "'") xs'
|
||||
else s
|
||||
|
||||
-----------------------------------------------------
|
||||
-- Computation
|
||||
@@ -397,7 +408,7 @@ match sig f eqs as0 =
|
||||
tryMatch (p ) (VMeta i envi vs ) env = VSusp i envi vs (\v -> tryMatch p v env)
|
||||
tryMatch (p ) (VGen i vs ) env = VConst f as0
|
||||
tryMatch (p ) (VSusp i envi vs k) env = VSusp i envi vs (\v -> tryMatch p (k v) env)
|
||||
tryMatch (p ) v@(VConst _ _ ) env = VConst f as0
|
||||
tryMatch (p ) v@(VConst _ _ ) env = match sig f eqs as0
|
||||
tryMatch (PApp f1 ps1) (VApp f2 vs2 ) env | f1 == f2 = tryMatches eqs (ps1++ps) (vs2++as) res env
|
||||
tryMatch (PLit l1 ) (VLit l2 ) env | l1 == l2 = tryMatches eqs ps as res env
|
||||
tryMatch (PImplArg p ) (VImplArg v ) env = tryMatch p v env
|
||||
|
||||
@@ -81,7 +81,7 @@ linTree pgf cnc e = nub (map snd (lin Nothing 0 e [] [] e []))
|
||||
where
|
||||
lp = lproductions cnc
|
||||
|
||||
lin mb_cty n_fid e0 ys xs (EAbs _ x e) es = lin mb_cty n_fid e0 ys (x:xs) e es
|
||||
lin mb_cty n_fid e0 ys xs (EAbs _ x e) es = lin mb_cty n_fid e0 ys (freshBoundVars (xs ++ ys) [x] ++ xs) e es --fresh: AR 2024
|
||||
lin mb_cty n_fid e0 ys xs (EApp e1 e2) es = lin mb_cty n_fid e0 ys xs e1 (e2:es)
|
||||
lin mb_cty n_fid e0 ys xs (EImplArg e) es = lin mb_cty n_fid e0 ys xs e es
|
||||
lin mb_cty n_fid e0 ys xs (ETyped e _) es = lin mb_cty n_fid e0 ys xs e es
|
||||
|
||||
@@ -94,11 +94,11 @@ class Selector s where
|
||||
select :: CId -> Scope -> Maybe Int -> TcM s (Expr,TType)
|
||||
|
||||
instance Applicative (TcM s) where
|
||||
pure = return
|
||||
pure x = TcM (\abstr k h -> k x)
|
||||
(<*>) = ap
|
||||
|
||||
instance Monad (TcM s) where
|
||||
return x = TcM (\abstr k h -> k x)
|
||||
return = pure
|
||||
f >>= g = TcM (\abstr k h -> unTcM f abstr (\x -> unTcM (g x) abstr k h) h)
|
||||
|
||||
instance Selector s => Alternative (TcM s) where
|
||||
@@ -147,9 +147,9 @@ typeGenerators scope cat = fmap normalize (liftM2 (++) x y)
|
||||
where
|
||||
Scope gamma = scope
|
||||
|
||||
y | cat == cidInt = return [(1.0,ELit (LInt 999), TTyp [] (DTyp [] cat []))]
|
||||
| cat == cidFloat = return [(1.0,ELit (LFlt 3.14), TTyp [] (DTyp [] cat []))]
|
||||
| cat == cidString = return [(1.0,ELit (LStr "Foo"),TTyp [] (DTyp [] cat []))]
|
||||
y | cat == cidInt = return [(0.1, ELit (LInt n), TTyp [] (DTyp [] cat [])) | n <- ints]
|
||||
| cat == cidFloat = return [(0.1, ELit (LFlt d), TTyp [] (DTyp [] cat [])) | d <- floats]
|
||||
| cat == cidString = return [(0.1, ELit (LStr s),TTyp [] (DTyp [] cat [])) | s <- strs]
|
||||
| otherwise = TcM (\abstr k h ms ->
|
||||
case Map.lookup cat (cats abstr) of
|
||||
Just (_,fns,_) -> unTcM (mapM helper fns) abstr k h ms
|
||||
@@ -163,6 +163,11 @@ typeGenerators scope cat = fmap normalize (liftM2 (++) x y)
|
||||
where
|
||||
s = sum [p | (p,_,_) <- gens]
|
||||
|
||||
-- random elements of predefined types: many instead of one AR 2025-01-17
|
||||
ints = [1, 2, 3, 14, 42, 123, 999, 2025, 1000000, 1234567890]
|
||||
floats = [0.0, 1.0, 3.14, 0.999, 0.5772156649, 2.71828, 6.62607015, 19.3, 0.0001, 1.60934]
|
||||
strs = words "A B X Y b c x y foo bar"
|
||||
|
||||
emptyMetaStore :: MetaStore s
|
||||
emptyMetaStore = IntMap.empty
|
||||
|
||||
|
||||
@@ -651,6 +651,7 @@ app macro arg = text "\\" <> text macro <> text "{" <> arg <> text "}"
|
||||
latexDoc :: Doc -> Doc
|
||||
latexDoc body =
|
||||
vcat [text "\\documentclass{article}",
|
||||
text "\\usepackage[a4paper,margin=0.5in,landscape]{geometry}",
|
||||
text "\\usepackage[utf8]{inputenc}",
|
||||
text "\\begin{document}",
|
||||
body,
|
||||
|
||||
@@ -9,16 +9,16 @@ synopsis: Grammatical Framework
|
||||
description: A library for interpreting the Portable Grammar Format (PGF)
|
||||
homepage: https://www.grammaticalframework.org/
|
||||
bug-reports: https://github.com/GrammaticalFramework/gf-core/issues
|
||||
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.10.4
|
||||
tested-with: GHC==7.10.3, GHC==8.0.2, GHC==8.10.4, GHC==9.4.5
|
||||
|
||||
library
|
||||
default-language: Haskell2010
|
||||
build-depends:
|
||||
array >= 0.5.1 && < 0.6,
|
||||
base >= 4.9.1 && < 4.16,
|
||||
bytestring >= 0.10.8 && < 0.11,
|
||||
containers >= 0.5.7 && < 0.7,
|
||||
ghc-prim >= 0.5.0 && < 0.7,
|
||||
array >= 0.5.1 && < 0.7,
|
||||
base >= 4.9.1 && < 5.0,
|
||||
bytestring >= 0.10.8 && < 0.12,
|
||||
containers >= 0.5.7 && < 0.8,
|
||||
ghc-prim >= 0.5.0 && < 0.10,
|
||||
mtl >= 2.2.1 && < 2.3,
|
||||
pretty >= 1.1.3 && < 1.2,
|
||||
random >= 1.1 && < 1.3,
|
||||
|
||||
1
src/runtime/javascript/.gitignore
vendored
1
src/runtime/javascript/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
.libs/
|
||||
4
src/runtime/javascript/DEPRECATED.md
Normal file
4
src/runtime/javascript/DEPRECATED.md
Normal file
@@ -0,0 +1,4 @@
|
||||
# Deprecation notice
|
||||
|
||||
As of June 2019, this JavaScript version of the GF runtime is considered deprecated,
|
||||
in favour of the TypeScript version in <https://github.com/GrammaticalFramework/gf-typescript>.
|
||||
@@ -1,48 +0,0 @@
|
||||
FROM emscripten/emsdk:latest
|
||||
|
||||
RUN apt update
|
||||
RUN apt install -y autoconf automake libtool make
|
||||
|
||||
WORKDIR /tmp/c
|
||||
COPY gu/*.c gu/*.h /tmp/c/gu/
|
||||
COPY pgf/*.c pgf/*.h /tmp/c/pgf/
|
||||
COPY pgf/lightning/i386/*.h /tmp/c/pgf/lightning/i386/
|
||||
COPY pgf/lightning/*.h /tmp/c/pgf/lightning/
|
||||
COPY \
|
||||
Makefile.am \
|
||||
configure.ac \
|
||||
lib*.pc.in \
|
||||
/tmp/c/
|
||||
RUN autoreconf -i
|
||||
RUN emconfigure ./configure
|
||||
RUN emmake make
|
||||
RUN emcc .libs/libgu.a .libs/libpgf.a -o pgf.js \
|
||||
-sALLOW_MEMORY_GROWTH \
|
||||
-sEXPORTED_FUNCTIONS="\
|
||||
_pgf_read,\
|
||||
_pgf_abstract_name,\
|
||||
_pgf_read_expr,\
|
||||
_pgf_print_expr,\
|
||||
_pgf_expr_arity,\
|
||||
_gu_new_pool,\
|
||||
_gu_new_exn,\
|
||||
_gu_data_in,\
|
||||
_gu_exn_is_raised,\
|
||||
_gu_exn_caught_,\
|
||||
_gu_exn_caught_data,\
|
||||
_gu_exn_clear,\
|
||||
_gu_new_string_buf,\
|
||||
_gu_string_buf_out,\
|
||||
_gu_string_buf_data,\
|
||||
_malloc,\
|
||||
_free\
|
||||
"\
|
||||
-sEXPORTED_RUNTIME_METHODS="\
|
||||
ccall,\
|
||||
FS,\
|
||||
getValue,\
|
||||
AsciiToString,\
|
||||
stringToUTF8,\
|
||||
UTF8ToString,\
|
||||
allocateUTF8\
|
||||
"
|
||||
@@ -1,11 +0,0 @@
|
||||
# JavaScript runtime using Web Assembly
|
||||
|
||||
This folder contains very early work experimenting with a pure JavaScript runtime,
|
||||
compiled to Web Assembly (WASM) using [Emscripten](https://emscripten.org/).
|
||||
|
||||
1. Compile the WASM files (inside Docker) using `build-wasm.sh`, placing them in `.libs/`
|
||||
2. Test in Node.js by running `node test-node.js [path to PGF]`
|
||||
3. Test in a web browser
|
||||
1. Start a server with `npx serve -l 41296`
|
||||
2. Browse to `http://localhost:41296/test-web.html`
|
||||
3. Check JavaScript console
|
||||
@@ -1,10 +0,0 @@
|
||||
#! /usr/bin/env bash
|
||||
set -e
|
||||
|
||||
# Build inside Docker image
|
||||
IMAGE="gf/build-c-runtime-wasm"
|
||||
docker build ../c --file Dockerfile --tag $IMAGE
|
||||
|
||||
# Copy bulit files from container to host
|
||||
mkdir -p .libs
|
||||
docker run --rm --volume "$PWD":/tmp/host $IMAGE bash -c "cp pgf.js pgf.wasm /tmp/host/.libs/"
|
||||
62
src/runtime/javascript/editor-grammar/Editor.gf
Normal file
62
src/runtime/javascript/editor-grammar/Editor.gf
Normal file
@@ -0,0 +1,62 @@
|
||||
abstract Editor = {
|
||||
|
||||
cat Adjective ;
|
||||
Noun ;
|
||||
Verb ;
|
||||
Determiner ;
|
||||
Sentence ;
|
||||
|
||||
fun Available : Adjective ;
|
||||
Next : Adjective ;
|
||||
Previous : Adjective ;
|
||||
|
||||
fun Bulgarian : Noun ;
|
||||
Danish : Noun ;
|
||||
English : Noun ;
|
||||
Finnish : Noun ;
|
||||
French : Noun ;
|
||||
German : Noun ;
|
||||
Italian : Noun ;
|
||||
Norwegian : Noun ;
|
||||
Russian : Noun ;
|
||||
Spanish : Noun ;
|
||||
Swedish : Noun ;
|
||||
|
||||
fun Float_N : Noun ;
|
||||
Integer_N : Noun ;
|
||||
String_N : Noun ;
|
||||
|
||||
Language : Noun ;
|
||||
Node : Noun ;
|
||||
Page : Noun ;
|
||||
Refinement : Noun ;
|
||||
Tree : Noun ;
|
||||
Wrapper : Noun ;
|
||||
|
||||
fun Copy : Verb ;
|
||||
Cut : Verb ;
|
||||
Delete : Verb ;
|
||||
Enter : Verb ;
|
||||
Parse : Verb ;
|
||||
Paste : Verb ;
|
||||
Redo : Verb ;
|
||||
Refine : Verb ;
|
||||
Replace : Verb ;
|
||||
Select : Verb ;
|
||||
Show : Verb ;
|
||||
Undo : Verb ;
|
||||
Wrap : Verb ;
|
||||
|
||||
fun DefPlDet : Determiner ;
|
||||
DefSgDet : Determiner ;
|
||||
IndefPlDet : Determiner ;
|
||||
IndefSgDet : Determiner ;
|
||||
|
||||
fun Command : Verb -> Determiner -> Noun -> Sentence ;
|
||||
CommandAdj : Verb -> Determiner -> Adjective -> Noun -> Sentence ;
|
||||
ErrorMessage : Adjective -> Noun -> Sentence ;
|
||||
Label : Noun -> Sentence ;
|
||||
RandomlyCommand : Verb -> Determiner -> Noun -> Sentence ;
|
||||
SingleWordCommand : Verb -> Sentence ;
|
||||
|
||||
}
|
||||
63
src/runtime/javascript/editor-grammar/EditorEng.gf
Normal file
63
src/runtime/javascript/editor-grammar/EditorEng.gf
Normal file
@@ -0,0 +1,63 @@
|
||||
--# -path=alltenses
|
||||
concrete EditorEng of Editor = open GrammarEng, ParadigmsEng in {
|
||||
|
||||
lincat Adjective = A ;
|
||||
Noun = N ;
|
||||
Verb = V ;
|
||||
Determiner = Det ;
|
||||
Sentence = Utt ;
|
||||
|
||||
lin Available = mkA "available" ;
|
||||
Next = mkA "next" ;
|
||||
Previous = mkA "previous" ;
|
||||
|
||||
lin Bulgarian = mkN "Bulgarian" ;
|
||||
Danish = mkN "Danish" ;
|
||||
English = mkN "English" ;
|
||||
Finnish = mkN "Finnish" ;
|
||||
French = mkN "French" ;
|
||||
German = mkN "German" ;
|
||||
Italian = mkN "Italian" ;
|
||||
Norwegian = mkN "Norwegian" ;
|
||||
Russian = mkN "Russian" ;
|
||||
Spanish = mkN "Spanish" ;
|
||||
Swedish = mkN "Swedish" ;
|
||||
|
||||
lin Float_N = mkN "float" ;
|
||||
Integer_N = mkN "integer" ;
|
||||
String_N = mkN "string" ;
|
||||
|
||||
Language = mkN "language" ;
|
||||
Node = mkN "node" ;
|
||||
Page = mkN "page" ;
|
||||
Refinement = mkN "refinement" ;
|
||||
Tree = mkN "tree" ;
|
||||
Wrapper = mkN "wrapper" ;
|
||||
|
||||
lin Copy = mkV "copy" ;
|
||||
Cut = mkV "cut" ;
|
||||
Delete = mkV "delete" ;
|
||||
Enter = mkV "enter" ;
|
||||
Parse = mkV "parse" ;
|
||||
Paste = mkV "paste" ;
|
||||
Redo = mkV "redo" ;
|
||||
Refine = mkV "refine" ;
|
||||
Replace = mkV "replace" ;
|
||||
Select = mkV "select" ;
|
||||
Show = mkV "show" ;
|
||||
Undo = mkV "undo" ;
|
||||
Wrap = mkV "wrap" ;
|
||||
|
||||
lin DefPlDet = DetQuant DefArt NumPl ;
|
||||
DefSgDet = DetQuant DefArt NumSg ;
|
||||
IndefPlDet = DetQuant IndefArt NumPl ;
|
||||
IndefSgDet = DetQuant IndefArt NumSg ;
|
||||
|
||||
lin Command v d n = UttImpSg PPos (ImpVP (ComplSlash (SlashV2a (mkV2 v)) (DetCN d (UseN n)))) ;
|
||||
CommandAdj v d a n = UttImpSg PPos (ImpVP (ComplSlash (SlashV2a (mkV2 v)) (DetCN d (AdjCN (PositA a) (UseN n))))) ;
|
||||
ErrorMessage a n = UttNP (DetCN (DetQuant no_Quant NumPl) (AdjCN (PositA a) (UseN n))) ;
|
||||
Label n = UttNP (MassNP (UseN n)) ;
|
||||
RandomlyCommand v d n = UttImpSg PPos (ImpVP (AdvVP (ComplSlash (SlashV2a (mkV2 v)) (DetCN d (UseN n))) (PrepNP (mkPrep "at") (MassNP (UseN (mkN "random")))))) ;
|
||||
SingleWordCommand v = UttImpSg PPos (ImpVP (UseV v)) ;
|
||||
|
||||
}
|
||||
17
src/runtime/javascript/editor.html
Normal file
17
src/runtime/javascript/editor.html
Normal file
@@ -0,0 +1,17 @@
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
||||
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||
<link rel="stylesheet" type="text/css" href="style.css" />
|
||||
<script type="text/javascript" src="gflib.js"></script>
|
||||
<script type="text/javascript" src="editorGrammar.js"></script>
|
||||
<script type="text/javascript" src="grammar.js"></script>
|
||||
<script type="text/javascript" src="gfjseditor.js"></script>
|
||||
<title>Web-based Syntax Editor</title>
|
||||
</head>
|
||||
<body onload="mkEditor('editor', Foods)" onkeydown="return hotKeys(event)">
|
||||
<div id="editor">
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
1
src/runtime/javascript/editorGrammar.js
Normal file
1
src/runtime/javascript/editorGrammar.js
Normal file
File diff suppressed because one or more lines are too long
BIN
src/runtime/javascript/empty.png
Normal file
BIN
src/runtime/javascript/empty.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 161 B |
1310
src/runtime/javascript/gfjseditor.js
Normal file
1310
src/runtime/javascript/gfjseditor.js
Normal file
File diff suppressed because it is too large
Load Diff
54
src/runtime/javascript/gflib-xhtml-voice.js
Normal file
54
src/runtime/javascript/gflib-xhtml-voice.js
Normal file
@@ -0,0 +1,54 @@
|
||||
/* Output */
|
||||
|
||||
function sayText(text) {
|
||||
document.voice_output_text = text;
|
||||
activateForm("voice_output");
|
||||
}
|
||||
|
||||
/* XHTML+Voice Utilities */
|
||||
|
||||
function activateForm(formid) {
|
||||
var form = document.getElementById(formid);
|
||||
var e = document.createEvent("UIEvents");
|
||||
e.initEvent("DOMActivate","true","true");
|
||||
form.dispatchEvent(e);
|
||||
}
|
||||
|
||||
|
||||
/* DOM utilities */
|
||||
|
||||
/* Gets the head element of the document. */
|
||||
function getHeadElement() {
|
||||
var hs = document.getElementsByTagName("head");
|
||||
if (hs.length == 0) {
|
||||
var head = document.createElement("head");
|
||||
document.documentElement.insertBefore(head, document.documentElement.firstChild);
|
||||
return head;
|
||||
} else {
|
||||
return hs[0];
|
||||
}
|
||||
}
|
||||
|
||||
/* Gets the body element of the document. */
|
||||
function getBodyElement() {
|
||||
var bs = document.getElementsByTagName("body");
|
||||
if (bs.length == 0) {
|
||||
var body = document.createElement("body");
|
||||
document.documentElement.appendChild(body);
|
||||
return body;
|
||||
} else {
|
||||
return bs[0];
|
||||
}
|
||||
}
|
||||
|
||||
/* Removes all the children of a node */
|
||||
function removeChildren(node) {
|
||||
while (node.hasChildNodes()) {
|
||||
node.removeChild(node.firstChild);
|
||||
}
|
||||
}
|
||||
|
||||
function setText(node, text) {
|
||||
removeChildren(node);
|
||||
node.appendChild(document.createTextNode(text));
|
||||
}
|
||||
1148
src/runtime/javascript/gflib.js
Normal file
1148
src/runtime/javascript/gflib.js
Normal file
File diff suppressed because it is too large
Load Diff
1
src/runtime/javascript/grammar.js
Normal file
1
src/runtime/javascript/grammar.js
Normal file
File diff suppressed because one or more lines are too long
@@ -1,543 +0,0 @@
|
||||
/**
|
||||
* This module is the high-level JavaScript wrapper around the WASM-compiled version.
|
||||
*/
|
||||
|
||||
async function mkAPI() {
|
||||
|
||||
const sizeof_GuMapItor = 4;
|
||||
const offsetof_GuMapItor_fn = 0;
|
||||
|
||||
var asm = null;
|
||||
var wasmTable = null;
|
||||
var freeTableIndexes = [];
|
||||
|
||||
function setErrNo(value) {
|
||||
HEAP32[asm.__errno_location() >> 2] = value;
|
||||
return value;
|
||||
}
|
||||
|
||||
function abortOnCannotGrowMemory(requestedSize) {
|
||||
abort('Cannot enlarge memory arrays to size ' + requestedSize + ' bytes (OOM). Either (1) compile with -s INITIAL_MEMORY=X with X higher than the current value ' + HEAP8.length + ', (2) compile with -s ALLOW_MEMORY_GROWTH=1 which allows increasing the size at runtime, or (3) if you want malloc to return NULL (0) instead of this abort, compile with -s ABORTING_MALLOC=0 ');
|
||||
}
|
||||
|
||||
function _emscripten_resize_heap(requestedSize) {
|
||||
var oldSize = HEAPU8.length;
|
||||
requestedSize = requestedSize >>> 0;
|
||||
abortOnCannotGrowMemory(requestedSize);
|
||||
}
|
||||
|
||||
var tempRet0 = 0;
|
||||
var urlData = {};
|
||||
var fdData = {};
|
||||
var fdMax = 0;
|
||||
var asmLibraryArg = {
|
||||
"__syscall_fcntl64":
|
||||
function (fd, cmd, varargs) {
|
||||
setErrNo(134);
|
||||
return -1;
|
||||
},
|
||||
|
||||
"__syscall_ioctl":
|
||||
function (fd, op, varargs) {
|
||||
setErrNo(134);
|
||||
return -1;
|
||||
},
|
||||
|
||||
"__syscall_open":
|
||||
function (pathPtr, flags, varargs) {
|
||||
const path = UTF8ToString(pathPtr);
|
||||
const data = urlData[path];
|
||||
if (data == null) {
|
||||
setErrNo(129);
|
||||
return -1;
|
||||
}
|
||||
fdMax++;
|
||||
fdData[fdMax] = {data: data, pos: 0};
|
||||
delete urlData[path];
|
||||
return fdMax;
|
||||
},
|
||||
|
||||
"_munmap_js":
|
||||
function (addr, len, prot, flags, fd, offset) {
|
||||
setErrNo(134);
|
||||
return -1;
|
||||
},
|
||||
|
||||
"abort":
|
||||
function () {
|
||||
console.log('native code called abort()');
|
||||
},
|
||||
|
||||
"emscripten_memcpy_big":
|
||||
function (dest, src, num) {
|
||||
HEAPU8.copyWithin(dest, src, src + num);
|
||||
},
|
||||
|
||||
"emscripten_resize_heap":
|
||||
function _emscripten_resize_heap(requestedSize) {
|
||||
var oldSize = HEAPU8.length;
|
||||
requestedSize = requestedSize >>> 0;
|
||||
abortOnCannotGrowMemory(requestedSize);
|
||||
},
|
||||
|
||||
"fd_close":
|
||||
function (fd) {
|
||||
delete fdData[fd];
|
||||
return 0;
|
||||
},
|
||||
|
||||
"fd_read":
|
||||
function (fd, iov, iovcnt, pnum) {
|
||||
const info = fdData[fd];
|
||||
if (info == null) {
|
||||
setErrNo(121);
|
||||
return -1;
|
||||
}
|
||||
|
||||
let num = 0;
|
||||
for (let i = 0; i < iovcnt; i++) {
|
||||
const ptr = HEAP32[(((iov)+(i*8))>>2)];
|
||||
const len = HEAP32[(((iov)+(i*8 + 4))>>2)];
|
||||
|
||||
let cnt = 0;
|
||||
while (cnt < len && info.pos < info.data.length) {
|
||||
HEAP8[ptr+cnt] = info.data[info.pos];
|
||||
info.pos++
|
||||
cnt++;
|
||||
}
|
||||
|
||||
num += cnt;
|
||||
if (cnt < len) break; // nothing more to read
|
||||
}
|
||||
|
||||
HEAP32[((pnum)>>2)] = num;
|
||||
return 0;
|
||||
},
|
||||
|
||||
"fd_seek":
|
||||
function (fd, offset_low, offset_high, whence, newOffset) {
|
||||
setErrNo(134);
|
||||
return -1;
|
||||
},
|
||||
|
||||
"fd_write":
|
||||
function _fd_write(fd, iov, iovcnt, pnum) {
|
||||
setErrNo(134);
|
||||
return -1;
|
||||
},
|
||||
|
||||
"setTempRet0":
|
||||
function (value) {
|
||||
tempRet0 = value;
|
||||
},
|
||||
|
||||
"__assert_fail":
|
||||
function (condition, filename, line, func) {
|
||||
abort('Assertion failed: ' + UTF8ToString(condition) + ', at: ' + [filename ? UTF8ToString(filename) : 'unknown filename', line, func ? UTF8ToString(func) : 'unknown function']);
|
||||
}
|
||||
};
|
||||
|
||||
// Wraps a JS function as a wasm function with a given signature.
|
||||
function convertJsFunctionToWasm(func, sig) {
|
||||
|
||||
// If the type reflection proposal is available, use the new
|
||||
// "WebAssembly.Function" constructor.
|
||||
// Otherwise, construct a minimal wasm module importing the JS function and
|
||||
// re-exporting it.
|
||||
if (typeof WebAssembly.Function == "function") {
|
||||
var typeNames = {
|
||||
'i': 'i32',
|
||||
'j': 'i64',
|
||||
'f': 'f32',
|
||||
'd': 'f64'
|
||||
};
|
||||
var type = {
|
||||
parameters: [],
|
||||
results: sig[0] == 'v' ? [] : [typeNames[sig[0]]]
|
||||
};
|
||||
for (var i = 1; i < sig.length; ++i) {
|
||||
type.parameters.push(typeNames[sig[i]]);
|
||||
}
|
||||
return new WebAssembly.Function(type, func);
|
||||
}
|
||||
|
||||
// The module is static, with the exception of the type section, which is
|
||||
// generated based on the signature passed in.
|
||||
var typeSection = [
|
||||
0x01, // id: section,
|
||||
0x00, // length: 0 (placeholder)
|
||||
0x01, // count: 1
|
||||
0x60, // form: func
|
||||
];
|
||||
var sigRet = sig.slice(0, 1);
|
||||
var sigParam = sig.slice(1);
|
||||
var typeCodes = {
|
||||
'i': 0x7f, // i32
|
||||
'j': 0x7e, // i64
|
||||
'f': 0x7d, // f32
|
||||
'd': 0x7c, // f64
|
||||
};
|
||||
|
||||
// Parameters, length + signatures
|
||||
typeSection.push(sigParam.length);
|
||||
for (var i = 0; i < sigParam.length; ++i) {
|
||||
typeSection.push(typeCodes[sigParam[i]]);
|
||||
}
|
||||
|
||||
// Return values, length + signatures
|
||||
// With no multi-return in MVP, either 0 (void) or 1 (anything else)
|
||||
if (sigRet == 'v') {
|
||||
typeSection.push(0x00);
|
||||
} else {
|
||||
typeSection = typeSection.concat([0x01, typeCodes[sigRet]]);
|
||||
}
|
||||
|
||||
// Write the overall length of the type section back into the section header
|
||||
// (excepting the 2 bytes for the section id and length)
|
||||
typeSection[1] = typeSection.length - 2;
|
||||
|
||||
// Rest of the module is static
|
||||
var bytes = new Uint8Array([
|
||||
0x00, 0x61, 0x73, 0x6d, // magic ("\0asm")
|
||||
0x01, 0x00, 0x00, 0x00, // version: 1
|
||||
].concat(typeSection, [
|
||||
0x02, 0x07, // import section
|
||||
// (import "e" "f" (func 0 (type 0)))
|
||||
0x01, 0x01, 0x65, 0x01, 0x66, 0x00, 0x00,
|
||||
0x07, 0x05, // export section
|
||||
// (export "f" (func 0 (type 0)))
|
||||
0x01, 0x01, 0x66, 0x00, 0x00,
|
||||
]));
|
||||
|
||||
// We can compile this wasm module synchronously because it is very small.
|
||||
// This accepts an import (at "e.f"), that it reroutes to an export (at "f")
|
||||
var module = new WebAssembly.Module(bytes);
|
||||
var instance = new WebAssembly.Instance(module, {
|
||||
'e': {'f': func}
|
||||
});
|
||||
var wrappedFunc = instance.exports['f'];
|
||||
return wrappedFunc;
|
||||
}
|
||||
|
||||
function addFunction(func, sig) {
|
||||
func = convertJsFunctionToWasm(func, sig);
|
||||
|
||||
let index;
|
||||
|
||||
// Reuse a free index if there is one, otherwise grow.
|
||||
if (freeTableIndexes.length) {
|
||||
index = freeTableIndexes.pop();
|
||||
} else {
|
||||
// Grow the table
|
||||
try {
|
||||
wasmTable.grow(1);
|
||||
} catch (err) {
|
||||
if (!(err instanceof RangeError)) {
|
||||
throw err;
|
||||
}
|
||||
throw 'Unable to grow wasm table. Set ALLOW_TABLE_GROWTH.';
|
||||
}
|
||||
index = wasmTable.length - 1;
|
||||
}
|
||||
|
||||
wasmTable.set(index, func);
|
||||
return index;
|
||||
}
|
||||
|
||||
function removeFunction(index) {
|
||||
freeTableIndexes.push(index);
|
||||
}
|
||||
|
||||
const response = await fetch("pgf.wasm", { credentials: 'same-origin' });
|
||||
|
||||
const info = {
|
||||
'env': asmLibraryArg,
|
||||
'wasi_snapshot_preview1': asmLibraryArg,
|
||||
};
|
||||
|
||||
// Suppress closure warning here since the upstream definition for
|
||||
// instantiateStreaming only allows Promise<Repsponse> rather than
|
||||
// an actual Response.
|
||||
// TODO(https://github.com/google/closure-compiler/pull/3913): Remove if/when upstream closure is fixed.
|
||||
/** @suppress {checkTypes} */
|
||||
const result = await WebAssembly.instantiateStreaming(response, info);
|
||||
|
||||
asm = result["instance"].exports;
|
||||
wasmTable = asm['__indirect_function_table'];
|
||||
const buf = asm['memory'].buffer;
|
||||
const HEAP8 = new Int8Array(buf);
|
||||
const HEAP16 = new Int16Array(buf);
|
||||
const HEAP32 = new Int32Array(buf);
|
||||
const HEAPU8 = new Uint8Array(buf);
|
||||
const HEAPU16 = new Uint16Array(buf);
|
||||
const HEAPU32 = new Uint32Array(buf);
|
||||
const HEAPF32 = new Float32Array(buf);
|
||||
const HEAPF64 = new Float64Array(buf);
|
||||
|
||||
// Returns the number of bytes the given JavaScript string occupies when
// encoded as UTF-8, EXCLUDING the null terminator byte.
function lengthBytesUTF8(str) {
    var total = 0;
    for (var i = 0; i < str.length; ++i) {
        // charCodeAt yields UTF-16 code units; combine a surrogate pair
        // into the real code point before measuring it.
        // See http://unicode.org/faq/utf_bom.html#utf16-3
        var cp = str.charCodeAt(i);
        if (cp >= 0xD800 && cp <= 0xDFFF) {
            cp = 0x10000 + ((cp & 0x3FF) << 10) | (str.charCodeAt(++i) & 0x3FF);
        }
        if (cp <= 0x7F) {
            total += 1;
        } else if (cp <= 0x7FF) {
            total += 2;
        } else if (cp <= 0xFFFF) {
            total += 3;
        } else {
            total += 4;
        }
    }
    return total;
}
|
||||
|
||||
function stringToUTF8Array(str, heap, outIdx, maxBytesToWrite) {
|
||||
if (!(maxBytesToWrite > 0)) // Parameter maxBytesToWrite is not optional. Negative values, 0, null, undefined and false each don't write out any bytes.
|
||||
return 0;
|
||||
|
||||
var startIdx = outIdx;
|
||||
var endIdx = outIdx + maxBytesToWrite - 1; // -1 for string null terminator.
|
||||
for (var i = 0; i < str.length; ++i) {
|
||||
// Gotcha: charCodeAt returns a 16-bit word that is a UTF-16 encoded code unit, not a Unicode code point of the character! So decode UTF16->UTF32->UTF8.
|
||||
// See http://unicode.org/faq/utf_bom.html#utf16-3
|
||||
// For UTF8 byte structure, see http://en.wikipedia.org/wiki/UTF-8#Description and https://www.ietf.org/rfc/rfc2279.txt and https://tools.ietf.org/html/rfc3629
|
||||
var u = str.charCodeAt(i); // possibly a lead surrogate
|
||||
if (u >= 0xD800 && u <= 0xDFFF) {
|
||||
var u1 = str.charCodeAt(++i);
|
||||
u = 0x10000 + ((u & 0x3FF) << 10) | (u1 & 0x3FF);
|
||||
}
|
||||
if (u <= 0x7F) {
|
||||
if (outIdx >= endIdx) break;
|
||||
heap[outIdx++] = u;
|
||||
} else if (u <= 0x7FF) {
|
||||
if (outIdx + 1 >= endIdx) break;
|
||||
heap[outIdx++] = 0xC0 | (u >> 6);
|
||||
heap[outIdx++] = 0x80 | (u & 63);
|
||||
} else if (u <= 0xFFFF) {
|
||||
if (outIdx + 2 >= endIdx) break;
|
||||
heap[outIdx++] = 0xE0 | (u >> 12);
|
||||
heap[outIdx++] = 0x80 | ((u >> 6) & 63);
|
||||
heap[outIdx++] = 0x80 | (u & 63);
|
||||
} else {
|
||||
if (outIdx + 3 >= endIdx) break;
|
||||
if (u > 0x10FFFF) warnOnce('Invalid Unicode code point 0x' + u.toString(16) + ' encountered when serializing a JS string to a UTF-8 string in wasm memory! (Valid unicode code points should be in range 0-0x10FFFF).');
|
||||
heap[outIdx++] = 0xF0 | (u >> 18);
|
||||
heap[outIdx++] = 0x80 | ((u >> 12) & 63);
|
||||
heap[outIdx++] = 0x80 | ((u >> 6) & 63);
|
||||
heap[outIdx++] = 0x80 | (u & 63);
|
||||
}
|
||||
}
|
||||
|
||||
// Null-terminate the pointer to the buffer.
|
||||
heap[outIdx] = 0;
|
||||
return outIdx - startIdx;
|
||||
}
|
||||
|
||||
function allocateUTF8(pool,str) {
|
||||
var size = lengthBytesUTF8(str) + 1;
|
||||
var ptr = asm.gu_malloc(pool,size);
|
||||
if (ptr) stringToUTF8Array(str, HEAP8, ptr, size);
|
||||
return ptr;
|
||||
}
|
||||
|
||||
const UTF8Decoder = typeof TextDecoder != 'undefined' ? new TextDecoder('utf8') : undefined;
|
||||
|
||||
/**
|
||||
* @param {number} idx
|
||||
* @param {number=} maxBytesToRead
|
||||
* @return {string}
|
||||
*/
|
||||
function UTF8ArrayToString(heap, idx, maxBytesToRead) {
|
||||
var endIdx = idx + maxBytesToRead;
|
||||
var endPtr = idx;
|
||||
// TextDecoder needs to know the byte length in advance, it doesn't stop on null terminator by itself.
|
||||
// Also, use the length info to avoid running tiny strings through TextDecoder, since .subarray() allocates garbage.
|
||||
// (As a tiny code save trick, compare endPtr against endIdx using a negation, so that undefined means Infinity)
|
||||
while (heap[endPtr] && !(endPtr >= endIdx)) ++endPtr;
|
||||
|
||||
if (endPtr - idx > 16 && heap.subarray && UTF8Decoder) {
|
||||
return UTF8Decoder.decode(heap.subarray(idx, endPtr));
|
||||
} else {
|
||||
var str = '';
|
||||
// If building with TextDecoder, we have already computed the string length above, so test loop end condition against that
|
||||
while (idx < endPtr) {
|
||||
// For UTF8 byte structure, see:
|
||||
// http://en.wikipedia.org/wiki/UTF-8#Description
|
||||
// https://www.ietf.org/rfc/rfc2279.txt
|
||||
// https://tools.ietf.org/html/rfc3629
|
||||
var u0 = heap[idx++];
|
||||
if (!(u0 & 0x80)) { str += String.fromCharCode(u0); continue; }
|
||||
var u1 = heap[idx++] & 63;
|
||||
if ((u0 & 0xE0) == 0xC0) { str += String.fromCharCode(((u0 & 31) << 6) | u1); continue; }
|
||||
var u2 = heap[idx++] & 63;
|
||||
if ((u0 & 0xF0) == 0xE0) {
|
||||
u0 = ((u0 & 15) << 12) | (u1 << 6) | u2;
|
||||
} else {
|
||||
if ((u0 & 0xF8) != 0xF0) warnOnce('Invalid UTF-8 leading byte 0x' + u0.toString(16) + ' encountered when deserializing a UTF-8 string in wasm memory to a JS string!');
|
||||
u0 = ((u0 & 7) << 18) | (u1 << 12) | (u2 << 6) | (heap[idx++] & 63);
|
||||
}
|
||||
|
||||
if (u0 < 0x10000) {
|
||||
str += String.fromCharCode(u0);
|
||||
} else {
|
||||
var ch = u0 - 0x10000;
|
||||
str += String.fromCharCode(0xD800 | (ch >> 10), 0xDC00 | (ch & 0x3FF));
|
||||
}
|
||||
}
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
// Decodes a NUL-terminated (or length-bounded) UTF-8 string at `ptr` in
// wasm memory into a JS string; a null pointer yields the empty string.
function UTF8ToString(ptr, maxBytesToRead) {
    if (!ptr) {
        return '';
    }
    return UTF8ArrayToString(HEAPU8, ptr, maxBytesToRead);
}
|
||||
|
||||
const GuErrnoStrPtr = asm.malloc(8);
|
||||
stringToUTF8Array("GuErrno", HEAP8, GuErrnoStrPtr, 8);
|
||||
|
||||
const PgfExnStrPtr = asm.malloc(8);
|
||||
stringToUTF8Array("PgfExn", HEAP8, PgfExnStrPtr, 8);
|
||||
|
||||
// Converts a raised GuExn into a JS Error. Recognizes the two exception
// kinds checked here: GuErrno (carries a C errno value) and PgfExn
// (carries a message string); anything else becomes a bare Error.
function pgfError(err) {
    if (asm.gu_exn_caught_(err, GuErrnoStrPtr)) {
        // Fix: declare locally — the original assigned an undeclared
        // identifier, leaking an implicit global (and throwing a
        // ReferenceError under strict mode / in ES modules).
        const errDataPtr = asm.gu_exn_caught_data(err);
        return new Error("errno=" + HEAP32[errDataPtr >> 2]);
    } else if (asm.gu_exn_caught_(err, PgfExnStrPtr)) {
        const msgPtr = asm.gu_exn_caught_data(err);
        return new Error(UTF8ToString(msgPtr));
    }
    return new Error();
}
|
||||
|
||||
const registry = new FinalizationRegistry((pool) => {
|
||||
asm.gu_pool_free(pool);
|
||||
});
|
||||
|
||||
function PGF(pgfPtr,name,pool) {
|
||||
this.pgfPtr = pgfPtr;
|
||||
this.abstractName = name;
|
||||
this.pool = pool;
|
||||
this.languages = {};
|
||||
registry.register(this,pool);
|
||||
}
|
||||
|
||||
function Concr(pgf,concrPtr,name) {
|
||||
this.pgf = pgf;
|
||||
this.name = name;
|
||||
this.concrPtr = concrPtr;
|
||||
}
|
||||
// Linearizes `expr` in this concrete syntax and returns the resulting
// surface string. All scratch state lives in a temporary pool that is
// freed before returning (or before rethrowing a PGF error).
Concr.prototype.linearize = function(expr) {
    const scratch = asm.gu_new_pool();
    const exn = asm.gu_new_exn(scratch);
    const buf = asm.gu_new_string_buf(scratch);
    const sink = asm.gu_string_buf_out(buf);

    asm.pgf_linearize(this.concrPtr, expr.exprPtr, sink, exn);
    if (asm.gu_exn_is_raised(exn)) {
        const failure = pgfError(exn);
        asm.gu_pool_free(scratch);
        throw failure;
    }

    const dataPtr = asm.gu_string_buf_data(buf);
    const dataLen = asm.gu_string_buf_length(buf);
    const rendered = UTF8ToString(dataPtr, dataLen);
    asm.gu_pool_free(scratch);

    return rendered;
}
|
||||
|
||||
// Fetches a compiled grammar from `pgfURL`, loads it through the C
// runtime and returns a PGF wrapper whose `languages` map holds a Concr
// per concrete syntax. The PGF's pool is reclaimed by `registry` when
// the wrapper is garbage-collected.
async function readPGF(pgfURL) {
    // Stage the downloaded bytes where the __syscall_open shim can find
    // them: pgf_read "opens" the URL as if it were a file path.
    const response = await fetch(pgfURL);
    urlData[pgfURL] = new Int8Array(await response.arrayBuffer());

    // Long-lived pool owning the grammar itself.
    const pool = asm.gu_new_pool();

    // Short-lived pool for the exception object and scratch strings.
    const tmp_pool = asm.gu_new_pool();
    const err = asm.gu_new_exn(tmp_pool);
    const strPtr = allocateUTF8(tmp_pool, pgfURL);
    const pgfPtr = asm.pgf_read(strPtr, pool, err);
    if (asm.gu_exn_is_raised(err)) {
        const e = pgfError(err);
        asm.gu_pool_free(tmp_pool);
        // Fix: also release the result pool — no PGF wrapper was
        // created, so the finalization registry would never free it.
        asm.gu_pool_free(pool);
        throw e;
    }

    const namePtr = asm.pgf_abstract_name(pgfPtr);
    const abstractName = UTF8ToString(namePtr);

    const pgf = new PGF(pgfPtr, abstractName, pool);

    // Enumerate the concrete languages through a C callback: build a
    // GuMapItor struct whose fn slot points at a JS closure wrapped as
    // a wasm function.
    const itor = asm.gu_malloc(tmp_pool, sizeof_GuMapItor);
    const fn =
        addFunction(
            (itor, namePtr, concrPtrPtr, err) => {
                const name = UTF8ToString(namePtr);
                const concrPtr = HEAP32[concrPtrPtr >> 2];
                pgf.languages[name] = new Concr(pgf, concrPtr, name);
            },
            "viiii"
        );
    HEAP32[(itor + offsetof_GuMapItor_fn) >> 2] = fn;
    asm.pgf_iter_languages(pgfPtr, itor, err);
    removeFunction(fn);

    asm.gu_pool_free(tmp_pool);
    return pgf;
}
|
||||
|
||||
function Expr(exprPtr,pool) {
|
||||
this.exprPtr = exprPtr;
|
||||
this.pool = pool;
|
||||
registry.register(this,pool);
|
||||
}
|
||||
// Pretty-prints this expression via pgf_print_expr into a temporary
// string buffer and returns the rendered text as a JS string.
Expr.prototype.toString = function() {
    const scratch = asm.gu_new_pool();

    const buf = asm.gu_new_string_buf(scratch);
    const sink = asm.gu_string_buf_out(buf);
    const exn = asm.gu_new_exn(scratch);
    asm.pgf_print_expr(this.exprPtr, 0, 0, sink, exn);
    if (asm.gu_exn_is_raised(exn)) {
        const failure = pgfError(exn);
        asm.gu_pool_free(scratch);
        throw failure;
    }

    const dataPtr = asm.gu_string_buf_data(buf);
    const dataLen = asm.gu_string_buf_length(buf);
    const rendered = UTF8ToString(dataPtr, dataLen);
    asm.gu_pool_free(scratch);

    return rendered;
};
|
||||
// Returns the arity of this expression (how many arguments the head is
// applied to).
// Fix: the original read `this.expr`, a field that does not exist — the
// Expr constructor stores the pointer as `exprPtr` — so pgf_expr_arity
// was always called with `undefined`. The unused `expr` parameter is
// dropped; callers invoke `expr.arity()` with no arguments.
Expr.prototype.arity = function() {
    return asm.pgf_expr_arity(this.exprPtr);
}
|
||||
|
||||
// Parses `exprStr` into an abstract-syntax Expr. The expression lives
// in its own pool, freed by the finalization registry when the wrapper
// is garbage-collected; scratch state lives in a temporary pool.
function readExpr(exprStr) {
    const tmp_pool = asm.gu_new_pool();

    const strPtr = allocateUTF8(tmp_pool, exprStr);
    // Fix: the input stream length must be the UTF-8 *byte* count, not
    // exprStr.length (UTF-16 code units), or non-ASCII input is cut short.
    const in_ = asm.gu_data_in(strPtr, lengthBytesUTF8(exprStr), tmp_pool);
    const err = asm.gu_new_exn(tmp_pool);
    const pool = asm.gu_new_pool();
    const expr = asm.pgf_read_expr(in_, pool, tmp_pool, err);

    // Fix: inspect the exception *before* releasing tmp_pool — `err` is
    // allocated inside tmp_pool, so the original checked freed memory.
    if (asm.gu_exn_is_raised(err)) {
        const e = pgfError(err);
        asm.gu_pool_free(tmp_pool);
        // No Expr wrapper exists to own `pool`; free it to avoid a leak.
        asm.gu_pool_free(pool);
        throw e;
    }
    asm.gu_pool_free(tmp_pool);

    if (expr == 0) {
        asm.gu_pool_free(pool);
        throw new Error('Expression cannot be parsed');
    }

    return new Expr(expr, pool);
}
|
||||
|
||||
return { readPGF, readExpr };
|
||||
}
|
||||
|
||||
// This allows us to use both from Node and in browser
|
||||
if (typeof module != 'undefined') {
|
||||
module.exports = mkAPI;
|
||||
}
|
||||
BIN
src/runtime/javascript/minus.png
Normal file
BIN
src/runtime/javascript/minus.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 201 B |
BIN
src/runtime/javascript/plus.png
Normal file
BIN
src/runtime/javascript/plus.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 229 B |
252
src/runtime/javascript/style.css
Normal file
252
src/runtime/javascript/style.css
Normal file
@@ -0,0 +1,252 @@
|
||||
body {
|
||||
font-family:arial,helvetica,sans-serif;
|
||||
font-size:12px;
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
#wrapper {
|
||||
width:740px;
|
||||
height:520px;
|
||||
margin:auto 50px;
|
||||
border:1px solid gray;
|
||||
padding:10px;
|
||||
|
||||
}
|
||||
|
||||
#absFrame {
|
||||
width:250px;
|
||||
height:250px;
|
||||
padding:10px;
|
||||
border:1px solid gray;
|
||||
float:left;
|
||||
white-space: nowrap;
|
||||
}
|
||||
|
||||
#conFrame {
|
||||
width:436px;
|
||||
height:250px;
|
||||
margin-left:10px;
|
||||
padding:10px;
|
||||
border:1px solid gray;
|
||||
float:left;
|
||||
white-space: normal;
|
||||
overflow:auto;
|
||||
}
|
||||
|
||||
#actFrame {
|
||||
width:250px;
|
||||
height:170px;
|
||||
margin-top:10px;
|
||||
padding:10px;
|
||||
border:1px solid gray;
|
||||
float:left;
|
||||
overflow:auto;
|
||||
}
|
||||
|
||||
#refFrame {
|
||||
width:436px;
|
||||
height:170px;
|
||||
margin-left:10px;
|
||||
margin-top:10px;
|
||||
padding:10px;
|
||||
border:1px solid gray;
|
||||
float:left;
|
||||
overflow:auto;
|
||||
}
|
||||
|
||||
#messageFrame {
|
||||
width:506px;
|
||||
height:15px;
|
||||
margin-top:10px;
|
||||
margin-right:10px;
|
||||
padding:10px;
|
||||
border:1px solid gray;
|
||||
float:left;
|
||||
overflow:hidden;
|
||||
}
|
||||
|
||||
#clipboardFrame {
|
||||
width:180px;
|
||||
height:15px;
|
||||
margin-top:10px;
|
||||
padding:10px;
|
||||
border:1px solid gray;
|
||||
float:left;
|
||||
overflow:auto;
|
||||
}
|
||||
|
||||
#tree {
|
||||
left: -10px;
|
||||
top: -10px;
|
||||
margin: 0px;
|
||||
padding: 10px;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
ul {
|
||||
position: relative;
|
||||
list-style: none;
|
||||
margin-left: 20px;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
li {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
img.tree-menu {
|
||||
margin-right: 5px;
|
||||
}
|
||||
|
||||
a.tree:link, a.tree:visited, a.tree:active {
|
||||
color: black;
|
||||
background-color: white;
|
||||
text-decoration: none;
|
||||
margin-right:10px;
|
||||
}
|
||||
|
||||
a.tree:hover {
|
||||
color: blue;
|
||||
background-color: white;
|
||||
text-decoration: underline;
|
||||
margin-right:10px;
|
||||
}
|
||||
|
||||
a.treeSelected:link, a.treeSelected:visited, a.treeSelected:active {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
text-decoration: none;
|
||||
margin-right:10px;
|
||||
}
|
||||
|
||||
a.treeSelected:hover {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
text-decoration: underline;
|
||||
margin-right:10px;
|
||||
}
|
||||
|
||||
a.treeGray:link, a.treeGray:visited, a.treeGray:active {
|
||||
color: silver;
|
||||
background-color: white;
|
||||
text-decoration: none;
|
||||
margin-right:10px;
|
||||
}
|
||||
|
||||
a.treeGray:hover {
|
||||
color: silver;
|
||||
background-color: white;
|
||||
text-decoration: none;
|
||||
margin-right:10px;
|
||||
}
|
||||
|
||||
table.action, table.refinement, table.wrapper, table.tree, table.language {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
border-style: none;
|
||||
border-collapse: collapse;
|
||||
border-spacing: 0px;
|
||||
}
|
||||
|
||||
tr.selected {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
}
|
||||
|
||||
tr.unavailable, tr.closed {
|
||||
color: silver;
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
tr.unavailable:hover {
|
||||
color: silver;
|
||||
background-color: #3366CC;
|
||||
}
|
||||
|
||||
tr.action, tr.refinement, tr.wrapper, tr.tree {
|
||||
color: black;
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
tr.action:hover, tr.refinement:hover, tr.wrapper:hover, tr.tree:hover {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
}
|
||||
|
||||
td.action {
|
||||
width: 220px;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
td.refinement, td.wrapper, td.tree {
|
||||
width: 515px;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
}
|
||||
|
||||
td.hotKey {
|
||||
width: 30px;
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
td.language {
|
||||
color: black;
|
||||
background-color: white;
|
||||
margin: 1px;
|
||||
padding: 1px;
|
||||
}
|
||||
|
||||
td.language:hover {
|
||||
color: blue;
|
||||
background-color: white;
|
||||
text-decoration: underline;
|
||||
margin: 1px;
|
||||
padding: 1px;
|
||||
}
|
||||
|
||||
td.selected {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
margin: 1px;
|
||||
padding: 1px;
|
||||
}
|
||||
|
||||
td.selected:hover {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
text-decoration: underline;
|
||||
margin: 1px;
|
||||
padding: 1px;
|
||||
}
|
||||
|
||||
p {
|
||||
margin-bottom: 40px;
|
||||
}
|
||||
|
||||
span.normal {
|
||||
color: black;
|
||||
background-color: white;
|
||||
text-decoration: none;
|
||||
padding-left: 2px;
|
||||
padding-right: 2px;
|
||||
}
|
||||
|
||||
span.edit {
|
||||
color: black;
|
||||
background-color: white;
|
||||
text-decoration: none;
|
||||
border:2px inset;
|
||||
padding-left: 2px;
|
||||
padding-right: 2px;
|
||||
}
|
||||
|
||||
span.selected {
|
||||
color: white;
|
||||
background-color: #3366CC;
|
||||
text-decoration: none;
|
||||
padding-left: 2px;
|
||||
padding-right: 2px;
|
||||
}
|
||||
@@ -1,33 +0,0 @@
|
||||
const Module = require('./.libs/pgf.js');
|
||||
const JSPGF = require('./jspgf.js')(Module);
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
Module.onRuntimeInitialized = () => {
|
||||
|
||||
// Read PGF path from args
|
||||
if (process.argv.length > 2) {
|
||||
const pgfPathHost = process.argv[2];
|
||||
|
||||
// Copy file into filesystem
|
||||
const pgfPathFS = '/tmp/' + path.basename(pgfPathHost);
|
||||
const rawPgf = fs.readFileSync(pgfPathHost);
|
||||
Module.FS.writeFile(pgfPathFS, rawPgf);
|
||||
|
||||
// Read PGF
|
||||
const pgf = JSPGF.readPGF(pgfPathFS);
|
||||
|
||||
// Print its name
|
||||
console.log(JSPGF.abstractName(pgf));
|
||||
}
|
||||
|
||||
// Parse expression
|
||||
const expr = JSPGF.readExpr("Pred (Another (x f))");
|
||||
|
||||
// Show it
|
||||
console.log(JSPGF.showExpr(expr));
|
||||
|
||||
// Print its arity
|
||||
console.log('arity', JSPGF.arity(expr));
|
||||
}
|
||||
|
||||
@@ -1,13 +0,0 @@
|
||||
<!doctype html>
|
||||
<html lang="en-us">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||
</head>
|
||||
<body>
|
||||
<script type="text/javascript" src="./jspgf.js"></script>
|
||||
<script type="text/javascript" src="./test-web.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
mkAPI().then((pgf) => {
|
||||
// Parse expression
|
||||
const expr = pgf.readExpr("Pred (This Fish) Fresh");
|
||||
|
||||
// Show it
|
||||
console.log(expr.toString());
|
||||
|
||||
// Print its arity
|
||||
console.log('arity', expr.arity());
|
||||
|
||||
pgf.readPGF("Foods.pgf").then((gr) => {
|
||||
// Print the grammar name
|
||||
console.log(gr.abstractName);
|
||||
|
||||
// Access a language and print the concrete name
|
||||
console.log(gr.languages["FoodsEng"].name);
|
||||
|
||||
// Linearize an expression
|
||||
console.log(gr.languages["FoodsEng"].linearize(expr));
|
||||
});
|
||||
});
|
||||
54
src/runtime/javascript/translator.css
Normal file
54
src/runtime/javascript/translator.css
Normal file
@@ -0,0 +1,54 @@
|
||||
body {
|
||||
color: black;
|
||||
background-color: white;
|
||||
}
|
||||
|
||||
dl {
|
||||
|
||||
}
|
||||
|
||||
dt {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
dl dd {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
dl.fromLang dt {
|
||||
display: none;
|
||||
}
|
||||
|
||||
dl.toLang {
|
||||
border-width: 1px 0 0 0;
|
||||
border-style: solid;
|
||||
border-color: #c0c0c0;
|
||||
}
|
||||
|
||||
dl.toLang dt {
|
||||
color: #c0c0c0;
|
||||
display: block;
|
||||
float: left;
|
||||
width: 5em;
|
||||
}
|
||||
|
||||
|
||||
dl.toLang dd {
|
||||
border-width: 0 0 1px 0;
|
||||
border-style: solid;
|
||||
border-color: #c0c0c0;
|
||||
}
|
||||
|
||||
|
||||
ul {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
li {
|
||||
list-style-type: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
48
src/runtime/javascript/translator.html
Normal file
48
src/runtime/javascript/translator.html
Normal file
@@ -0,0 +1,48 @@
|
||||
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
|
||||
|
||||
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
|
||||
<head>
|
||||
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
|
||||
<link rel="stylesheet" type="text/css" href="translator.css" />
|
||||
<script type="text/javascript" src="gflib.js"></script>
|
||||
<script type="text/javascript" src="grammar.js"></script>
|
||||
<script type="text/javascript" src="translator.js"></script>
|
||||
<script type="text/javascript">
|
||||
/* CHANGE ME */
|
||||
var grammar = Foods;
|
||||
|
||||
function updateTranslation () {
|
||||
var input = document.getElementById('inputText').value;
|
||||
var fromLang = document.getElementById('fromLang').value;
|
||||
var toLang = document.getElementById('toLang').value;
|
||||
var output = document.getElementById('output');
|
||||
var translation = grammar.translate(input, fromLang, toLang);
|
||||
removeChildren(output);
|
||||
output.appendChild(formatTranslation(translation));
|
||||
}
|
||||
|
||||
function populateLangs () {
|
||||
var f = document.getElementById('fromLang');
|
||||
var t = document.getElementById('toLang');
|
||||
for (var c in grammar.concretes) {
|
||||
addOption(f, c, c);
|
||||
addOption(t, c, c);
|
||||
}
|
||||
}
|
||||
</script>
|
||||
<title>Web-based GF Translator</title>
|
||||
</head>
|
||||
<body onload="populateLangs(grammar, 'fromLang', 'toLang')">
|
||||
<form id="translate">
|
||||
<p>
|
||||
<input type="text" name="inputText" id="inputText" value="this cheese is warm" size="50" />
|
||||
</p>
|
||||
<p>
|
||||
From: <select name="fromLang" id="fromLang" onchange=""><option value="">Any language</option></select>
|
||||
To: <select name="toLang" id="toLang"><option value="">All languages</option></select>
|
||||
<input type="button" value="Translate" onclick="updateTranslation()" />
|
||||
</p>
|
||||
</form>
|
||||
<div id="output"></div>
|
||||
</body>
|
||||
</html>
|
||||
51
src/runtime/javascript/translator.js
Normal file
51
src/runtime/javascript/translator.js
Normal file
@@ -0,0 +1,51 @@
|
||||
/* Builds a nested DOM structure presenting translation results:
   an outer <dl> grouped by source language, each entry holding a <ul>
   of readings, each reading a <dl> grouped by target language. */
function formatTranslation (outputs) {
    var byFromLang = document.createElement("dl");
    byFromLang.className = "fromLang";
    for (var fromLang in outputs) {
        var readingList = document.createElement("ul");
        addDefinition(byFromLang, document.createTextNode(fromLang), readingList);
        var readings = outputs[fromLang];
        for (var i in readings) {
            var byToLang = document.createElement("dl");
            byToLang.className = "toLang";
            for (var toLang in readings[i]) {
                addDefinition(byToLang,
                              document.createTextNode(toLang),
                              document.createTextNode(readings[i][toLang]));
            }
            addItem(readingList, byToLang);
        }
    }

    return byFromLang;
}
|
||||
|
||||
/* DOM utilities for specific tags */
|
||||
|
||||
/* Appends a <dt>/<dd> pair to the definition list `dl`: `t` becomes the
   term content, `d` the definition content. */
function addDefinition (dl, t, d) {
    var term = document.createElement("dt");
    var definition = document.createElement("dd");
    term.appendChild(t);
    definition.appendChild(d);
    dl.appendChild(term);
    dl.appendChild(definition);
}
|
||||
|
||||
/* Wraps `i` in a new <li> and appends it to the list `ul`. */
function addItem (ul, i) {
    var item = document.createElement("li");
    item.appendChild(i);
    ul.appendChild(item);
}
|
||||
|
||||
/* Appends an <option> with the given submit value and visible text to
   the <select> element. */
function addOption (select, value, content) {
    var opt = document.createElement("option");
    opt.value = value;
    var label = document.createTextNode(content);
    opt.appendChild(label);
    select.appendChild(opt);
}
|
||||
|
||||
/* General DOM utilities */
|
||||
|
||||
/* Empties `node` by detaching its children one at a time. */
function removeChildren(node) {
    var first;
    while ((first = node.firstChild) != null) {
        node.removeChild(first);
    }
}
|
||||
@@ -385,7 +385,7 @@ Expr_call(ExprObject* self, PyObject* args, PyObject* kw)
|
||||
pyexpr->pool = gu_new_pool();
|
||||
pyexpr->expr = self->expr;
|
||||
|
||||
for (Py_ssize_t i = 0; i < n_args; i++) {
|
||||
for (size_t i = 0; i < n_args; i++) {
|
||||
PyObject* obj = PyTuple_GetItem(args, i);
|
||||
if (obj->ob_type != &pgf_ExprType) {
|
||||
PyErr_SetString(PyExc_TypeError, "the arguments must be expressions");
|
||||
@@ -549,7 +549,7 @@ Expr_visit(ExprObject* self, PyObject *args)
|
||||
return NULL;
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < app->n_args; i++) {
|
||||
for (size_t i = 0; i < (size_t) app->n_args; i++) {
|
||||
ExprObject* pyarg = (ExprObject*) pgf_ExprType.tp_alloc(&pgf_ExprType, 0);
|
||||
if (pyarg == NULL) {
|
||||
Py_DECREF(args);
|
||||
@@ -856,7 +856,7 @@ Type_init(TypeObject *self, PyObject *args, PyObject *kwds)
|
||||
self->type->cid = gu_string_copy(catname_s, self->pool);
|
||||
|
||||
self->type->n_exprs = n_exprs;
|
||||
for (Py_ssize_t i = 0; i < n_exprs; i++) {
|
||||
for (size_t i = 0; i < n_exprs; i++) {
|
||||
PyObject* obj = PyList_GetItem(py_exprs, i);
|
||||
if (Py_TYPE(obj) != &pgf_ExprType) {
|
||||
PyErr_SetString(PyExc_TypeError, "the arguments in the second list must be expressions");
|
||||
@@ -1184,6 +1184,8 @@ BIND_alloc(PyTypeObject *self, Py_ssize_t nitems)
|
||||
{
|
||||
if (BIND_instance == NULL)
|
||||
BIND_instance = PyType_GenericAlloc(self, nitems);
|
||||
else
|
||||
Py_INCREF(BIND_instance);
|
||||
return BIND_instance;
|
||||
}
|
||||
|
||||
@@ -1678,7 +1680,7 @@ Concr_complete(ConcrObject* self, PyObject *args, PyObject *keywds)
|
||||
static char *kwlist[] = {"sentence", "cat", "prefix", "n", NULL};
|
||||
|
||||
PyObject* sentence0 = NULL;
|
||||
char* sentence = NULL;
|
||||
const char* sentence = NULL;
|
||||
PyObject* start = NULL;
|
||||
GuString prefix = "";
|
||||
bool prefix_bind = false;
|
||||
@@ -1688,10 +1690,10 @@ Concr_complete(ConcrObject* self, PyObject *args, PyObject *keywds)
|
||||
&prefix, &max_count))
|
||||
return NULL;
|
||||
|
||||
IterObject* pyres = (IterObject*)
|
||||
pgf_IterType.tp_alloc(&pgf_IterType, 0);
|
||||
if (pyres == NULL) {
|
||||
return NULL;
|
||||
IterObject* pyres = (IterObject*)
|
||||
pgf_IterType.tp_alloc(&pgf_IterType, 0);
|
||||
if (pyres == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
pyres->source = (PyObject*) self->grammar;
|
||||
@@ -1720,6 +1722,7 @@ Concr_complete(ConcrObject* self, PyObject *args, PyObject *keywds)
|
||||
sentence = PyUnicode_AsUTF8(sentence0);
|
||||
} else {
|
||||
PyErr_SetString(PyExc_TypeError, "The sentence must be either a string or a tuple of string and pgf.BIND");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PgfType* type;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
from distutils.core import setup, Extension
|
||||
from setuptools import setup, Extension
|
||||
import os
|
||||
|
||||
includes = os.getenv('EXTRA_INCLUDE_DIRS','').split(':')
|
||||
@@ -16,7 +16,7 @@ pgf_module = Extension('pgf',
|
||||
libraries = ['gu', 'pgf'])
|
||||
|
||||
setup (name = 'pgf',
|
||||
version = '1.0',
|
||||
version = '1.1',
|
||||
description = 'Python bindings to the Grammatical Framework\'s PGF runtime',
|
||||
long_description="""\
|
||||
Grammatical Framework (GF) is a programming language for multilingual grammar applications.
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user