forked from GitHub/gf-core
gfse: initial support for grammars in the cloud
This lets the user access the same set of grammars from multiple devices. Sharing grammars between multiple users is possible but discouraged at the moment. There is no version handling, so concurrent editing of the same grammar by different users might result in one user overwriting changes made by another user. (The same goes for concurrent editing on multiple devices by a single user, of course.)
This commit is contained in:
BIN
src/editor/simple/P/1306856253_weather_06.png
Normal file
BIN
src/editor/simple/P/1306856253_weather_06.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 2.3 KiB |
BIN
src/editor/simple/P/1307545089_weather_04.png
Normal file
BIN
src/editor/simple/P/1307545089_weather_04.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 2.3 KiB |
@@ -4,7 +4,8 @@ h1,h2,h3,h4,small { font-family: sans-serif; }
|
||||
h1:first-child, h2:first-child { margin-top: 0; margin-bottom: 1ex; }
|
||||
|
||||
#editor { max-width: 50em; }
|
||||
div.grammar { border: 1px solid black; background: #9df; }
|
||||
div.home, div.grammar { border: 1px solid black; background: #9df; }
|
||||
div.home { padding: 5px; }
|
||||
div.files { margin: 0 8px 8px 8px; }
|
||||
|
||||
div#file { border: 2px solid #009; border-top-width: 0; }
|
||||
@@ -13,12 +14,12 @@ div#file, pre.plain { background: white; padding: 0.6ex; }
|
||||
|
||||
.slideshow .hidden { display: none; }
|
||||
|
||||
img.right, div.right, div.modtime { float: right; }
|
||||
img.cloud, img.right, div.right, div.modtime { float: right; }
|
||||
.modtime { color: #999; white-space: nowrap; }
|
||||
|
||||
div.namebar { background: #9df; }
|
||||
div.namebar table { width: 100%; }
|
||||
.namebar h3 { margin: 0; color: #009; }
|
||||
.namebar h3, .home h3 { margin: 0; color: #009; }
|
||||
|
||||
td.right { text-align: right; }
|
||||
|
||||
@@ -74,4 +75,6 @@ table.tabs input[type=button] {
|
||||
/*text-decoration: underline;*/
|
||||
}
|
||||
|
||||
input.string_edit { font-family: inherit; font-size: inherit; }
|
||||
input.string_edit { font-family: inherit; font-size: inherit; }
|
||||
|
||||
ul.languages { -moz-column-width: 20em; }
|
||||
|
||||
@@ -12,9 +12,16 @@ function table(rows) { return node("table",{},rows); }
|
||||
// Table cell whose contents are right-aligned (via the "right" CSS class).
function td_right(cs) {
  var attrs={"class":"right"};
  return node("td",attrs,cs);
}
|
||||
// Wrap a JavaScript expression as a javascript: URL, for use in link hrefs.
function jsurl(js) {
  var scheme="javascript:";
  return scheme+js;
}
|
||||
|
||||
// Build an <input type="hidden"> element carrying name/value, for inclusion
// in a programmatically submitted form.
function hidden(name,value) {
  var attrs={type:"hidden",name:name,value:value};
  return node("input",attrs,[]);
}
|
||||
|
||||
// Insert el into the DOM as the sibling immediately preceding ref.
function insertBefore(el,ref) {
  ref.parentNode.insertBefore(el,ref);
}
|
||||
|
||||
// Insert el into the DOM as the sibling immediately following ref.
// (insertBefore with a null second argument appends, so this also works
// when ref is the last child.)
function insertAfter(el,ref) {
  var parent=ref.parentNode;
  parent.insertBefore(el,ref.nextSibling);
}
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
||||
function initial_view() {
|
||||
@@ -27,7 +34,24 @@ function initial_view() {
|
||||
function draw_grammar_list() {
|
||||
local.put("current",0);
|
||||
editor.innerHTML="";
|
||||
editor.appendChild(node("h3",{},[text("Your grammars")]));
|
||||
var cloud_upload=
|
||||
a(jsurl("upload_json()"),
|
||||
[node("img",{"class":"cloud",
|
||||
src:"P/1306856253_weather_06.png",alt:"[Up Cloud]",
|
||||
title:"Click to store your grammars in the cloud"},
|
||||
[])]);
|
||||
var home=div_class("home",[node("h3",{},
|
||||
[text("Your grammars"),cloud_upload])]);
|
||||
if(local.get("json_uploaded")) {
|
||||
var cloud_download=
|
||||
a(jsurl("download_json()"),
|
||||
[node("img",{"class":"cloud",
|
||||
src:"P/1307545089_weather_04.png",alt:"[Down Cloud]",
|
||||
title:"Click to download grammar updates from the cloud"},
|
||||
[])]);
|
||||
insertAfter(cloud_download,cloud_upload);
|
||||
}
|
||||
editor.appendChild(home)
|
||||
var gs=ul([]);
|
||||
function del(i) { return function () { delete_grammar(i); } }
|
||||
for(var i=0;i<local.count;i++) {
|
||||
@@ -42,9 +66,9 @@ function draw_grammar_list() {
|
||||
editor.appendChild(text("You have not created any grammars yet."));
|
||||
else if(local.count==0)
|
||||
editor.appendChild(text("Your grammar list is empty."));
|
||||
editor.appendChild(gs);
|
||||
home.appendChild(gs);
|
||||
|
||||
editor.appendChild(
|
||||
home.appendChild(
|
||||
ul([li([a(jsurl("new_grammar()"),[text("New grammar")])])]));
|
||||
//editor.appendChild(text(local.count));
|
||||
}
|
||||
@@ -196,6 +220,7 @@ function add_concrete2(ix,code) {
|
||||
var cnc=g.concretes[ci];
|
||||
cnc.langcode=code;
|
||||
adjust_opens(cnc,oldcode,code);
|
||||
timestamp(cnc)
|
||||
}
|
||||
else
|
||||
cs.push(new_concrete(code))
|
||||
@@ -257,7 +282,13 @@ function draw_startcat(g) {
|
||||
function opt(cat) { return option(cat,cat); }
|
||||
var m= node("select",{},map(opt,abs.cats));
|
||||
m.value=startcat;
|
||||
m.onchange=function() { abs.startcat=m.value; save_grammar(g); }
|
||||
m.onchange=function() {
|
||||
if(m.value!=abs.startcat) {
|
||||
abs.startcat=m.value;
|
||||
timestamp(abs);
|
||||
save_grammar(g);
|
||||
}
|
||||
}
|
||||
return indent([kw("flags startcat"),sep(" = "),m]);
|
||||
}
|
||||
|
||||
@@ -271,10 +302,12 @@ function draw_abstract(g) {
|
||||
: text("");
|
||||
function sort_funs() {
|
||||
g.abstract.funs=sort_list(this,g.abstract.funs,"name");
|
||||
timestamp(g.abstract);
|
||||
save_grammar(g);
|
||||
}
|
||||
return div_id("file",
|
||||
[kw("abstract "),ident(g.basename),sep(" = "),
|
||||
draw_timestamp(g.abstract),
|
||||
flags,
|
||||
indent([extensible([kw_cat,
|
||||
indent(draw_cats(g))]),
|
||||
@@ -291,6 +324,7 @@ function add_cat(g,el) {
|
||||
if(err) return err;
|
||||
}
|
||||
for(var i in cats) g.abstract.cats.push(cats[i]);
|
||||
timestamp(g.abstract);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -299,6 +333,7 @@ function add_cat(g,el) {
|
||||
|
||||
function delete_cat(g,ix) {
|
||||
with(g.abstract) cats=delete_ix(cats,ix);
|
||||
timestamp(g.abstract);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -310,6 +345,7 @@ function rename_cat(g,el,cat) {
|
||||
var dc=defined_cats(g);
|
||||
if(dc[newcat]) return newcat+" is already in use";
|
||||
g=rename_category(g,cat,newcat);
|
||||
timestamp(g.abstract);
|
||||
reload_grammar(g);
|
||||
}
|
||||
return null;
|
||||
@@ -343,6 +379,7 @@ function add_fun(g,el) {
|
||||
var p=parse_fun(s);
|
||||
if(p.ok) {
|
||||
g.abstract.funs.push(p.ok);
|
||||
timestamp(g.abstract);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -362,6 +399,7 @@ function edit_fun(i) {
|
||||
if(p.ok.name!=old.name) g=rename_function(g,old.name,p.ok.name);
|
||||
if(show_type(p.ok.type)!=show_type(old.type))
|
||||
g=change_lin_lhs(g,p.ok);
|
||||
timestamp(g.abstract);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -374,6 +412,7 @@ function edit_fun(i) {
|
||||
|
||||
function delete_fun(g,ix) {
|
||||
with(g.abstract) funs=delete_ix(funs,ix);
|
||||
timestamp(g.abstract);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -438,6 +477,7 @@ function draw_concrete(g,i) {
|
||||
if(err) return err;
|
||||
adjust_opens(conc,conc.langcode,code);
|
||||
conc.langcode=code;
|
||||
timestamp(conc);
|
||||
reload_grammar(g);
|
||||
}
|
||||
string_editor(el,conc.langcode,change_langcode)
|
||||
@@ -448,6 +488,7 @@ function draw_concrete(g,i) {
|
||||
editable("span",ident(conc.langcode),g,
|
||||
edit_langcode,"Change language"),
|
||||
kw(" of "),ident(g.basename),sep(" = "),
|
||||
draw_timestamp(conc),
|
||||
indent([extensible([kw("open "),draw_opens(g,i)])]),
|
||||
indent([kw("lincat"),draw_lincats(g,i)]),
|
||||
indent([kw("lin"),draw_lins(g,i)]),
|
||||
@@ -485,12 +526,14 @@ function add_open2(ix,ci,m) {
|
||||
var conc=g.concretes[ci];
|
||||
conc.opens || (conc.opens=[]);
|
||||
conc.opens.push(m);
|
||||
timestamp(conc);
|
||||
save_grammar(g);
|
||||
open_concrete(g,ci);
|
||||
}
|
||||
|
||||
function delete_open(g,ci,ix) {
|
||||
with(g.concretes[ci]) opens=delete_ix(opens,ix);
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -521,6 +564,7 @@ function add_param(g,ci,el) {
|
||||
var p=parse_param(s);
|
||||
if(p.ok) {
|
||||
g.concretes[ci].params.push(p.ok);
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -536,6 +580,7 @@ function edit_param(ci,i) {
|
||||
var p=parse_param(s);
|
||||
if(p.ok) {
|
||||
g.concretes[ci].params[i]=p.ok;
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -549,6 +594,7 @@ function edit_param(ci,i) {
|
||||
|
||||
function delete_param(g,ci,ix) {
|
||||
with(g.concretes[ci]) params=delete_ix(params,ix);
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -576,6 +622,7 @@ function delete_lincat(g,ci,cat) {
|
||||
var c=g.concretes[ci];
|
||||
for(i=0;i<c.lincats.length && c.lincats[i].cat!=cat;i++);
|
||||
if(i<c.lincats.length) c.lincats=delete_ix(c.lincats,i);
|
||||
timestamp(c);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -586,6 +633,7 @@ function draw_lincats(g,i) {
|
||||
function ok(s) {
|
||||
if(c.template) conc.lincats.push({cat:c.cat,type:s});
|
||||
else c.type=s;
|
||||
timestamp(conc);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -615,6 +663,7 @@ function draw_lincats(g,i) {
|
||||
lcs.push(dtmpl(c));
|
||||
function sort_lincats() {
|
||||
conc.lincats=sort_list(this,conc.lincats,"cat");
|
||||
timestamp(conc);
|
||||
save_grammar(g);
|
||||
}
|
||||
return indent_sortable(lcs,sort_lincats);
|
||||
@@ -634,6 +683,7 @@ function add_oper(g,ci,el) {
|
||||
var p=parse_oper(s);
|
||||
if(p.ok) {
|
||||
g.concretes[ci].opers.push(p.ok);
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -649,6 +699,7 @@ function edit_oper(ci,i) {
|
||||
var p=parse_oper(s);
|
||||
if(p.ok) {
|
||||
g.concretes[ci].opers[i]=p.ok;
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -662,6 +713,7 @@ function edit_oper(ci,i) {
|
||||
|
||||
function delete_oper(g,ci,ix) {
|
||||
with(g.concretes[ci]) opers=delete_ix(opers,ix);
|
||||
timestamp(g.concretes[ci]);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -685,6 +737,7 @@ function draw_opers(g,ci) {
|
||||
"Add a new operator definition"));
|
||||
function sort_opers() {
|
||||
conc.opers=sort_list(this,conc.opers,"name");
|
||||
timestamp(conc);
|
||||
save_grammar(g);
|
||||
}
|
||||
return indent_sortable(es,sort_opers);
|
||||
@@ -695,6 +748,7 @@ function delete_lin(g,ci,fun) {
|
||||
var c=g.concretes[ci];
|
||||
for(i=0;i<c.lins.length && c.lins[i].fun!=fun;i++);
|
||||
if(i<c.lins.length) c.lins=delete_ix(c.lins,i);
|
||||
timestamp(c);
|
||||
reload_grammar(g);
|
||||
}
|
||||
|
||||
@@ -720,6 +774,7 @@ function draw_lins(g,i) {
|
||||
if(f.template)
|
||||
conc.lins.push({fun:f.fun,args:f.args,lin:s});
|
||||
else f.lin=s;
|
||||
timestamp(conc);
|
||||
reload_grammar(g);
|
||||
return null;
|
||||
}
|
||||
@@ -758,6 +813,7 @@ function draw_lins(g,i) {
|
||||
}
|
||||
function sort_lins() {
|
||||
conc.lins=sort_list(this,conc.lins,"fun");
|
||||
timestamp(conc);
|
||||
save_grammar(g);
|
||||
}
|
||||
var ls=map(draw_lin,conc.lins);
|
||||
@@ -768,29 +824,128 @@ function draw_lins(g,i) {
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
|
||||
function upload(g) {
|
||||
function get_dir(cont) {
|
||||
var dir=local.get("dir","");
|
||||
if(dir) upload2(g,dir);
|
||||
if(dir) cont(dir);
|
||||
else ajax_http_get("upload.cgi?dir",
|
||||
function(dir) {
|
||||
local.put("dir",dir);
|
||||
upload2(g,dir);
|
||||
cont(dir);
|
||||
});
|
||||
}
|
||||
|
||||
function upload2(g,dir) {
|
||||
var form=node("form",{method:"post",action:"upload.cgi"+dir},
|
||||
[hidden(g.basename,show_abstract(g))])
|
||||
for(var i in g.concretes)
|
||||
form.appendChild(hidden(g.basename+g.concretes[i].langcode,
|
||||
show_concrete(g.basename)(g.concretes[i])));
|
||||
editor.appendChild(form);
|
||||
form.submit();
|
||||
form.parentNode.removeChild(form);
|
||||
function upload(g) {
|
||||
|
||||
function upload2(dir) {
|
||||
var form=node("form",{method:"post",action:"upload.cgi"+dir},
|
||||
[hidden(g.basename+".gf",show_abstract(g))])
|
||||
for(var i in g.concretes)
|
||||
form.appendChild(hidden(g.basename+g.concretes[i].langcode+".gf",
|
||||
show_concrete(g.basename)(g.concretes[i])));
|
||||
editor.appendChild(form);
|
||||
form.submit();
|
||||
form.parentNode.removeChild(form);
|
||||
}
|
||||
|
||||
get_dir(upload2);
|
||||
}
|
||||
|
||||
function hidden(name,value) {
|
||||
return node("input",{type:"hidden",name:name,value:value},[])
|
||||
function upload_json() {
|
||||
function upload2(dir) {
|
||||
var form=node("form",{method:"post",action:"upload.cgi"+dir},
|
||||
[hidden("count.json",local.count)])
|
||||
for(var i=0;i<local.count;i++) {
|
||||
var g=local.get(i,null);
|
||||
if(g) form.appendChild(hidden(i+".json",JSON.stringify(g)));
|
||||
}
|
||||
editor.appendChild(form);
|
||||
form.submit();
|
||||
form.parentNode.removeChild(form);
|
||||
local.put("json_uploaded",Date.now());
|
||||
}
|
||||
|
||||
get_dir(upload2);
|
||||
}
|
||||
|
||||
// Look up the concrete syntax with the given language code.
// Returns the matching element of concs, or null when none matches.
function find_langcode(concs,langcode) {
  var found=null;
  for(var ci in concs) {
    if(!found && concs[ci].langcode==langcode) found=concs[ci];
  }
  return found;
}
|
||||
|
||||
|
||||
// Merge the grammar downloaded from the cloud (newg) with any local copy
// stored in slot i, module by module: whichever side has the newer
// timestamp wins. The merged grammar is written back to local storage.
// Returns a space-separated string naming the modules kept from the local
// copy ("" when nothing local was newer).
function merge_grammar(i,newg) {
  var oldg=local.get(i);
  var keep="";
  if(oldg) {
    oldg.basename=newg.basename;
    if(newg.abstract.timestamp<oldg.abstract.timestamp) {
      // BUG FIX: this was "newg.abstract=newg.abstract" (a no-op), so a
      // newer local abstract was silently discarded even though it was
      // reported in `keep`. Keep the local abstract, as the concrete-syntax
      // branch below does for its modules.
      newg.abstract=oldg.abstract;
      keep+=" "+oldg.basename;
    }
    for(var ci in newg.concretes) {
      var conc=newg.concretes[ci];
      var oldconc=find_langcode(oldg.concretes,conc.langcode);
      if(oldconc && conc.timestamp<oldconc.timestamp) {
        newg.concretes[ci]=oldconc;
        keep+=" "+oldg.basename+conc.langcode;
      }
    }
  }
  local.put(i,newg);
  return keep;
}
|
||||
|
||||
// Fetch all grammars stored in cloud directory `dir` (defaulting to the
// directory remembered in local storage) and merge them into local storage,
// then reload the page.
//
// The cloud side stores count.json (the number of grammar slots) plus one
// <i>.json file per slot. Slots are fetched one at a time, from slot
// count-1 down to 0, by the recursive download_files_from.
function download_json(dir) {

  var new_count; // slot count reported by the cloud; applied when all fetches finish
  dir || (dir=local.get("dir"));

  // GET one file from the cloud directory; ok/err are success/failure callbacks.
  function get_file(file,ok,err) {
    ajax_http_get("upload.cgi?download="+encodeURIComponent(dir+"/"+file),ok,err);
  }
  // Fetch slots count-1, count-2, ..., 0, then finalize.
  function download_files_from(count) {
    debug(count);
    var i=count-1;
    function file_ok(grammar) {
      // Merge with any local copy; newer local modules win (see merge_grammar).
      var keep=merge_grammar(i,JSON.parse(grammar));
      if(keep) debug("Keeping "+keep);
      download_files_from(i); // continue with the next (lower) slot
    }
    // NOTE(review): any fetch error is treated as "slot deleted in the
    // cloud" and removes the local copy too — confirm that is intended for
    // transient network errors as well.
    function file_err() { local.remove(i); download_files_from(i); }
    if(count>0) get_file(i+".json",file_ok,file_err)
    else {
      local.count=new_count;
      //alert("Download finished");
      // Reload after a delay — presumably to let the user read the debug
      // output first; TODO confirm.
      setTimeout(function(){location.href="."},3000);
    }
  }
  // Called with the contents of count.json; kicks off the per-slot downloads.
  function download_files(count) {
    new_count=count;
    local.put("current",0);
    download_files_from(count);
  }
  get_file("count.json",download_files);
}
|
||||
|
||||
// Entered via a share link (share.html#<dirname>): switch to the cloud
// directory named in the URL fragment and pull its grammars. Asks for
// confirmation before replacing local grammars, unless that directory is
// already the one in use.
function download_from_cloud() {
  var current=local.get("dir",null);
  var requested="/tmp/"+location.hash.substr(1);
  // || short-circuits, so the confirm dialog only appears when switching.
  var proceed=(requested==current) ||
              confirm("Cloud grammars will replace your local grammars");
  if(proceed) {
    local.put("dir",requested);
    download_json(requested);
  }
}
|
||||
|
||||
// Record the current time (ms since epoch) on obj, in the "timestamp"
// property or in a caller-chosen property name.
function timestamp(obj,prop) {
  var field=prop||"timestamp";
  obj[field]=Date.now();
}
|
||||
|
||||
// Render obj's modification time as a small right-floating label
// (styled by the "modtime" CSS class). Produces an empty label when the
// object has no timestamp yet.
function draw_timestamp(obj) {
  var when=obj.timestamp;
  var label=when ? " -- "+new Date(when).toLocaleString() : "";
  return node("small",{"class":"modtime"},[text(label)]);
}
|
||||
|
||||
/* -------------------------------------------------------------------------- */
|
||||
@@ -919,5 +1074,7 @@ function touch_edit() {
|
||||
|
||||
//document.body.appendChild(empty_id("div","debug"));
|
||||
|
||||
initial_view();
|
||||
touch_edit();
|
||||
if(editor) {
|
||||
initial_view();
|
||||
touch_edit();
|
||||
}
|
||||
|
||||
@@ -1,12 +1,17 @@
|
||||
import Monad(zipWithM_)
|
||||
import Monad(zipWithM)
|
||||
import System(getArgs)
|
||||
|
||||
main = save =<< getArgs
|
||||
|
||||
save [dir] =
|
||||
do fs@[ns,_] <- readIO =<< getContents
|
||||
save_all fs
|
||||
putStrLn $ unwords [n++".gf"|n<-ns]
|
||||
nes <- save_all fs
|
||||
putStrLn $ unwords nes
|
||||
where
|
||||
save_all [ns,cs] = zipWithM_ write1 ns cs
|
||||
write1 n = writeFile (dir++"/"++n++".gf")
|
||||
save_all [ns,cs] = zipWithM write1 ns cs
|
||||
write1 n c =
|
||||
do writeFile (dir++"/"++ne) c
|
||||
return ne
|
||||
where
|
||||
ne=if '.' `elem` n then n else n++".gf"
|
||||
|
||||
@@ -12,7 +12,7 @@ style_url="editor.css"
|
||||
tmp="$documentRoot/tmp"
|
||||
|
||||
make_dir() {
|
||||
dir="$(mktemp -d "$tmp/gfse.XXXXXXXX")"
|
||||
dir="$(mktemp -d "$tmp/gfse.XXXXXXXXXX")"
|
||||
# chmod a+rxw "$dir"
|
||||
chmod a+rx "$dir"
|
||||
cp "grammars.cgi" "$dir"
|
||||
@@ -25,30 +25,58 @@ check_grammar() {
|
||||
chgrp everyone "$dir"
|
||||
chmod g+ws "$dir"
|
||||
umask 002
|
||||
files=$(Reg from-url | LC_CTYPE=sv_SE.ISO8859-1 ./save "$dir")
|
||||
cd $dir
|
||||
begin pre
|
||||
if gf -s -make $files 2>&1 ; then
|
||||
end
|
||||
h3 OK
|
||||
begin dl
|
||||
[ -z "$minibar_url" ] || { dt; echo "▸"; link "$minibar_url?/tmp/${dir##*/}/" "Minibar"; }
|
||||
[ -z "$transquiz_url" ] || { dt; echo "▸"; link "$transquiz_url?/tmp/${dir##*/}/" "Translation Quiz"; }
|
||||
[ -z "$gfshell_url" ] || { dt; echo "▸"; link "$gfshell_url?dir=${dir##*/}" "GF Shell"; }
|
||||
dt ; echo "◂"; link "javascript:history.back()" "Back to Editor"
|
||||
files=( $(Reg from-url | LC_CTYPE=sv_SE.ISO8859-1 ./save "$dir") )
|
||||
gffiles=( )
|
||||
otherfiles=( )
|
||||
for f in ${files[*]} ; do
|
||||
case "$f" in
|
||||
*.gf) gffiles=( ${gffiles[*]} "$f" ) ;;
|
||||
*) otherfiles=( ${otherfiles[*]} "$f" ) ;;
|
||||
esac
|
||||
done
|
||||
|
||||
end
|
||||
begin pre
|
||||
ls -l *.pgf
|
||||
else
|
||||
end
|
||||
begin h3 class=error_message; echo Error; end
|
||||
for f in *.gf ; do
|
||||
h4 "$f"
|
||||
begin pre class=plain
|
||||
cat -n "$f"
|
||||
if [ ${#otherfiles} -gt 0 -a -n "$PATH_INFO" ] ; then
|
||||
echo "Use the following link for shared access to your grammars from multiple devices:"
|
||||
begin ul
|
||||
case "$SERVER_PORT" in
|
||||
80) port="" ;;
|
||||
*) port=":$SERVER_PORT"
|
||||
esac
|
||||
parent="http://$SERVER_NAME$port${REQUEST_URI%/upload.cgi/tmp/gfse.*}"
|
||||
cloudurl="$parent/share.html#${dir##*/}"
|
||||
li; link "$cloudurl" "$cloudurl"
|
||||
end
|
||||
done
|
||||
begin dl
|
||||
dt ; echo "◂"; link "javascript:history.back()" "Back to Editor"
|
||||
end
|
||||
fi
|
||||
|
||||
cd $dir
|
||||
if [ ${#gffiles} -gt 0 ] ; then
|
||||
begin pre
|
||||
echo "gf -s -make ${gffiles[*]}"
|
||||
if gf -s -make ${gffiles[*]} 2>&1 ; then
|
||||
end
|
||||
h3 OK
|
||||
begin dl
|
||||
[ -z "$minibar_url" ] || { dt; echo "▸"; link "$minibar_url?/tmp/${dir##*/}/" "Minibar"; }
|
||||
[ -z "$transquiz_url" ] || { dt; echo "▸"; link "$transquiz_url?/tmp/${dir##*/}/" "Translation Quiz"; }
|
||||
[ -z "$gfshell_url" ] || { dt; echo "▸"; link "$gfshell_url?dir=${dir##*/}" "GF Shell"; }
|
||||
dt ; echo "◂"; link "javascript:history.back()" "Back to Editor"
|
||||
|
||||
end
|
||||
begin pre
|
||||
ls -l *.pgf
|
||||
else
|
||||
end
|
||||
begin h3 class=error_message; echo Error; end
|
||||
for f in ${gffiles[*]} ; do
|
||||
h4 "$f"
|
||||
begin pre class=plain
|
||||
cat -n "$f"
|
||||
end
|
||||
done
|
||||
fi
|
||||
fi
|
||||
hr
|
||||
date
|
||||
@@ -56,6 +84,20 @@ check_grammar() {
|
||||
endall
|
||||
}
|
||||
|
||||
error400() {
|
||||
echo "Status: 400"
|
||||
pagestart "Error"
|
||||
echo "What do you want?"
|
||||
endall
|
||||
}
|
||||
|
||||
error404() {
|
||||
echo "Status: 404"
|
||||
pagestart "Not found"
|
||||
echo "Not found"
|
||||
endall
|
||||
}
|
||||
|
||||
if [ -z "$tmp" ] || ! [ -d "$tmp" ] ; then
|
||||
pagestart "Error"
|
||||
begin pre
|
||||
@@ -87,11 +129,25 @@ case "$REQUEST_METHOD" in
|
||||
dir) make_dir
|
||||
ContentType="text/plain"
|
||||
cgiheaders
|
||||
echo "/tmp/${dir##*/}"
|
||||
echo_n "/tmp/${dir##*/}"
|
||||
;;
|
||||
*) pagestart "Error"
|
||||
echo "What do you want?"
|
||||
endall
|
||||
download=*)
|
||||
file=$(qparse "$QUERY_STRING" download)
|
||||
case "$file" in
|
||||
/tmp/gfse.*/*.json) # shouldn't allow .. in path !!!
|
||||
path="$documentRoot$file"
|
||||
if [ -r "$path" ] ; then
|
||||
ContentType="text/javascript; charset=$charset"
|
||||
cgiheaders
|
||||
cat "$path"
|
||||
else
|
||||
error404
|
||||
fi
|
||||
;;
|
||||
*) error400
|
||||
esac
|
||||
;;
|
||||
*) error400
|
||||
esac
|
||||
esac
|
||||
fi
|
||||
|
||||
Reference in New Issue
Block a user