gfse: grammars in the cloud, work in progress

Author: hallgren
Date: 2011-07-06 16:55:42 +00:00
parent 25ae9b2dc4
commit db4753aa2d
4 changed files with 118 additions and 50 deletions

File 1 of 4 (CSS stylesheet):

@@ -78,3 +78,5 @@ table.tabs input[type=button] {
 input.string_edit { font-family: inherit; font-size: inherit; }
 ul.languages { -moz-column-width: 20em; }
+#sharing h1, #sharing .footer { display: none; }

File 2 of 4 (JavaScript editor):

@@ -34,15 +34,18 @@ function initial_view() {
 function draw_grammar_list() {
   local.put("current",0);
   editor.innerHTML="";
+  var uploaded=local.get("json_uploaded");
   var cloud_upload=
     a(jsurl("upload_json()"),
       [node("img",{"class":"cloud",
                    src:"P/1306856253_weather_06.png",alt:"[Up Cloud]",
-                   title:"Click to store your grammars in the cloud"},
+                   title: uploaded
+                          ? "Click to upload grammar updates to the cloud"
+                          : "Click to store your grammars in the cloud"},
             [])]);
   var home=div_class("home",[node("h3",{},
                              [text("Your grammars"),cloud_upload])]);
-  if(local.get("json_uploaded")) {
+  if(uploaded) {
     var cloud_download=
       a(jsurl("download_json()"),
         [node("img",{"class":"cloud",
@@ -71,6 +74,7 @@ function draw_grammar_list() {
   home.appendChild(
     ul([li([a(jsurl("new_grammar()"),[text("New grammar")])])]));
   //editor.appendChild(text(local.count));
+  home.appendChild(empty_id("div","sharing"));
 }
 function new_grammar() {
@@ -850,18 +854,29 @@ function upload(g) {
get_dir(upload2); get_dir(upload2);
} }
function upload_json() { function upload_json(cont) {
function upload3(resptext,status) {
local.put("json_uploaded",Date.now());
if(cont) cont();
else {
var sharing=element("sharing");
if(sharing) sharing.innerHTML=resptext;
}
}
function upload2(dir) { function upload2(dir) {
var form=node("form",{method:"post",action:"upload.cgi"+dir}, var prefix=dir.substr(10)+"-" // skip "/tmp/gfse."
[hidden("count.json",local.count)]) var form=new FormData();
for(var i=0;i<local.count;i++) { for(var i=0;i<local.count;i++) {
var g=local.get(i,null); var g=local.get(i,null);
if(g) form.appendChild(hidden(i+".json",JSON.stringify(g))); if(g) {
if(!g.unique_name) {
g.unique_name=prefix+i;
save_grammar(g)
}
form.append(g.unique_name+".json",JSON.stringify(g));
}
} }
editor.appendChild(form); ajax_http_post("upload.cgi"+dir,form,upload3,cont)
form.submit();
form.parentNode.removeChild(form);
local.put("json_uploaded",Date.now());
} }
get_dir(upload2); get_dir(upload2);
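
(Aside: the old code injected a hidden form into the page and submitted it, forcing a page load; the new code posts a FormData object in the background. ajax_http_post itself is not part of this diff; purely as a guess at the missing helper, a minimal sketch of the conventional XMLHttpRequest shape that would match the upload3(resptext,status) callback:)

// Hypothetical sketch of an ajax_http_post helper -- not the project's
// actual code, just a plain XMLHttpRequest wrapper with the same interface.
function ajax_http_post(url,formdata,ok,err) {
  var req=new XMLHttpRequest();
  req.open("POST",url,true);
  req.onreadystatechange=function() {
    if(req.readyState==4) {
      if(req.status==200) ok(req.responseText,req.status);
      else if(err) err(req.responseText,req.status);
    }
  };
  req.send(formdata); // the browser supplies the multipart/form-data boundary
}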
@@ -874,10 +889,20 @@ function find_langcode(concs,langcode) {
   return null;
 }
+function grammar_index() {
+  var index={}
+  var count=local.count
+  for(var i=0;i<count;i++) {
+    var g=local.get(i,null)
+    if(g && g.unique_name) index[g.unique_name]=i
+  }
+  return index
+}
 function merge_grammar(i,newg) {
   var oldg=local.get(i);
   var keep="";
+  debug("Merging at "+i);
   if(oldg) {
     oldg.basename=newg.basename;
     if(newg.abstract.timestamp<oldg.abstract.timestamp) {
@@ -897,45 +922,56 @@ function merge_grammar(i,newg) {
   return keep;
 }
-function download_json(dir) {
-  var new_count;
-  dir || (dir=local.get("dir"));
+function download_json(dir0) {
+  var dir= dir0 || local.get("dir");
+  var index=grammar_index();
+  var downloading=0;
+  function get_list(ok,err) {
+    ajax_http_get("upload.cgi?ls="+dir,ok,err);
+  }
   function get_file(file,ok,err) {
+    downloading++;
     ajax_http_get("upload.cgi?download="+encodeURIComponent(dir+"/"+file),ok,err);
   }
-  function download_files_from(count) {
-    debug(count);
-    var i=count-1;
-    function file_ok(grammar) {
-      var keep=merge_grammar(i,JSON.parse(grammar));
-      if(keep) debug("Keeping "+keep);
-      download_files_from(i);
-    }
-    function file_err() { local.remove(i); download_files_from(i); }
-    if(count>0) get_file(i+".json",file_ok,file_err)
+  function file_failed(errormsg,status) {
+    debug(errormsg)
+    downloading--;
+  }
+  function file_downloaded(grammar) {
+    downloading--;
+    var newg=JSON.parse(grammar);
+    debug("Downloaded "+newg.unique_name)
+    var i=index[newg.unique_name];
+    if(i!=undefined) merge_grammar(i,newg)
     else {
-      local.count=new_count;
-      //alert("Download finished");
-      setTimeout(function(){location.href="."},3000);
+      debug("New")
+      newg.index=null;
+      save_grammar(newg);
     }
+    if(downloading==0) setTimeout(function(){location.href="."},3000);
   }
-  function download_files(count) {
-    new_count=count;
+  function download_files(ls) {
     local.put("current",0);
-    download_files_from(count);
+    var files=ls.split(" ");
+    for(var i in files) get_file(files[i],file_downloaded,file_failed);
  }
-  get_file("count.json",download_files);
+  get_list(download_files);
 }
 function download_from_cloud() {
   var olddir=local.get("dir",null)
+  var uploaded=local.get("json_uploaded");
   var newdir="/tmp/"+location.hash.substr(1)
-  if(newdir==olddir || confirm("Cloud grammars will replace your local grammars")) {
-    local.put("dir",newdir);
-    download_json(newdir)
+  local.put("dir",newdir);
+  if(olddir && uploaded && newdir!=olddir) {
+    upload_json(function(){download_json(newdir)})
   }
+  else download_json(newdir)
 }
 function timestamp(obj,prop) {
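
(Aside: the unique_name scheme replaces the old count.json bookkeeping. Every uploaded grammar gets a stable file name derived from the upload directory, and downloaded grammars are matched back to local storage slots by that name. A worked example, with an invented directory name:)

// Invented example: suppose get_dir yields dir = "/tmp/gfse.abc123".
var dir="/tmp/gfse.abc123";
var prefix=dir.substr(10)+"-";   // "abc123-"  ("/tmp/gfse." is 10 characters)
// A grammar in slot 4 with no unique_name is named on upload:
//   g.unique_name = "abc123-4", stored server-side as "abc123-4.json".
// On download, GET upload.cgi?ls=/tmp/gfse.abc123 lists the *-*.json files:
var ls="abc123-0.json abc123-4.json";
var files=ls.split(" ");         // ["abc123-0.json","abc123-4.json"]
// Each fetched grammar's unique_name is then looked up in grammar_index()
// to choose between merge_grammar (known locally) and save_grammar (new).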

File 3 of 4 (Haskell save program):

@@ -1,17 +1,25 @@
-import Monad(zipWithM)
 import System(getArgs)
+import CGI(getQuery,string)
+import MUtils(apSnd)
 
-main = save =<< getArgs
+main = save2 =<< getArgs
 
-save [dir] =
+{-
+save1 [dir] =
   do fs@[ns,_] <- readIO =<< getContents
      nes <- save_all fs
      putStrLn $ unwords nes
   where
-    save_all [ns,cs] = zipWithM write1 ns cs
-    write1 n c =
-      do writeFile (dir++"/"++ne) c
-         return ne
-      where
-        ne=if '.' `elem` n then n else n++".gf"
+    save_all [ns,cs] = mapM (write1 dir) (zip ns cs)
+-}
+
+write1 dir (n,c) =
+  do writeFile (dir++"/"++ne) c
+     return ne
+  where
+    ne=if '.' `elem` n then n else n++".gf"
+
+save2 [dir] =
+  do nfs <- getQuery
+     nes <- mapM (write1 dir . apSnd string) nfs
+     putStrLn $ unwords nes

File 4 of 4 (shell CGI script, upload.cgi):

@@ -25,7 +25,8 @@ check_grammar() {
   chgrp everyone "$dir"
   chmod g+ws "$dir"
   umask 002
-  files=( $(Reg from-url | LC_CTYPE=sv_SE.ISO8859-1 ./save "$dir") )
+  # files=( $(Reg from-url | LC_CTYPE=sv_SE.ISO8859-1 ./save "$dir") )
+  files=( $(LC_CTYPE=sv_SE.ISO8859-1 ./save "$dir") )
   gffiles=( )
   otherfiles=( )
   for f in ${files[*]} ; do
@@ -46,9 +47,9 @@ check_grammar() {
       cloudurl="$parent/share.html#${dir##*/}"
       li; link "$cloudurl" "$cloudurl"
     end
-    begin dl
-      dt ; echo "◂"; link "javascript:history.back()" "Back to Editor"
-    end
+    #begin dl
+    #  dt ; echo "◂"; link "javascript:history.back()" "Back to Editor"
+    #end
   fi
   cd $dir
@@ -67,6 +68,7 @@ check_grammar() {
       end
       begin pre
         ls -l *.pgf
+      end
     else
     end
     begin h3 class=error_message; echo Error; end
@@ -78,8 +80,10 @@ check_grammar() {
     done
   fi
   fi
+  begin div class=footer
   hr
   date
+  end
   # begin pre ; env
   endall
 }
@@ -120,6 +124,7 @@ case "$REQUEST_METHOD" in
     ;;
   *)
     make_dir
+    echo >&2 "Using temporary directory $dir"
     check_grammar
     rm -rf "$dir"
 esac
@@ -131,6 +136,23 @@ case "$REQUEST_METHOD" in
       cgiheaders
      echo_n "/tmp/${dir##*/}"
       ;;
+    ls=*)
+      dir=$(qparse "$QUERY_STRING" ls)
+      case "$dir" in
+        /tmp/gfse.*) # shouldn't allow .. in path !!!
+          path="$documentRoot$dir"
+          if [ -d "$path" ] ; then
+            ContentType="text/plain; charset=$charset"
+            cgiheaders
+            cd "$path"
+            echo_n *-*.json
+          else
+            error404
+          fi
+          ;;
+        *) error400
+      esac
+      ;;
     download=*)
       file=$(qparse "$QUERY_STRING" download)
       case "$file" in