I tried this:
var $rdf = require('../rdflib.js');
var store = $rdf.graph()
var timeout = 5000 // 5000 ms timeout
var fetcher = new $rdf.Fetcher(store, timeout)
var url = 'http://bshambaugh.rww.io';
var LDP = $rdf.Namespace('http://www.w3.org/ns/ldp#');
fetcher.nowOrWhenFetched(url, function(ok, body, xhr) {
if (!ok) {
console.log("Oops, something happened and couldn't fetch data");
} else {
var me = $rdf.sym('http://bshambaugh.rww.io');
var contains = LDP('contains')
console.log(contains)
var duh = $rdf.queryToSPARQL('SELECT * FROM <http://bshambaugh.rww.io> WHERE { ?s ?p ?o . }');
console.log("I fetched the data");
}
})
But I am getting issues like:
NamedNode { uri: 'http://www.w3.org/ns/ldp#contains' }
/home/brent/rdflib.js/dist/rdflib-node.js:6276
for (var i = 0; i < query.vars.length; i++) {
^
TypeError: Cannot read property 'length' of undefined
at getSelect (/home/brent/rdflib.js/dist/rdflib-node.js:6276:35)
at getSPARQL (/home/brent/rdflib.js/dist/rdflib-node.js:6338:12)
at Object.$rdf.queryToSPARQL (/home/brent/rdflib.js/dist/rdflib-node.js:6341:10)
at /home/brent/rdflibexperiments/exp7.js:18:19
at doneFetch (/home/brent/rdflib.js/dist/rdflib-node.js:9783:52)
at /home/brent/rdflib.js/dist/rdflib-node.js:10479:20
at xhr.handle (/home/brent/rdflib.js/dist/rdflib-node.js:9365:9)
at .onreadystatechange (/home/brent/rdflib.js/dist/rdflib-node.js:10478:19)
at dispatchEvent (/home/brent/rdflib.js/node_modules/xmlhttprequest/lib/XMLHttpRequest.js:591:25)
at setState (/home/brent/rdflib.js/node_modules/xmlhttprequest/lib/XMLHttpRequest.js:610:14)
I am not really sure what I am supposed to call in rdflib.js for a SPARQL query. Are there any other JavaScript libraries that allow me to do SPARQL queries?
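Looking at the stack trace, my guess is that $rdf.queryToSPARQL expects an rdflib Query object (it loops over query.vars), not a raw SPARQL string, which is why vars is undefined. For going the other direction, running a SPARQL string against the triples already in the local store, something like the sketch below might work. It leans on the $rdf.SPARQLToQuery and store.query functions in rdflib.js; I have not verified the exact signatures in this build, so treat it as a guess rather than working code.
// untested sketch: turn a SPARQL SELECT string into an rdflib Query object,
// then run it against the triples already loaded into the store
var sparql = 'SELECT * WHERE { ?s ?p ?o . }';
var query = $rdf.SPARQLToQuery(sparql, false, store);
store.query(query, function (bindings) {
  // one call per solution; bindings appear to be keyed by variable name
  console.log(bindings['?s'], bindings['?p'], bindings['?o']);
});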
I found https://github.com/antoniogarrote/rdfstore-js .
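For comparison, a minimal rdfstore-js sketch might look roughly like this. I have not tried it yet; the create / load / execute calls are taken from the rdfstore-js README, and the callback signatures vary between versions, so the details here are assumptions.
var rdfstore = require('rdfstore');
rdfstore.create(function (err, store) {
  // load a tiny Turtle document, then run a SPARQL SELECT over it
  store.load('text/turtle', '<a> <b> <c> .', function (err, loaded) {
    store.execute('SELECT * WHERE { ?s ?p ?o }', function (err, results) {
      console.log(results); // expected: an array of binding objects
    });
  });
});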
Tuesday, May 31, 2016
Experiments with rdflib.js -- Sixth experiment
Create the file for the code:
vim exp6.js
Write the code:
var $rdf = require('../rdflib.js');
var store = $rdf.graph()
var timeout = 5000 // 5000 ms timeout
var fetcher = new $rdf.Fetcher(store, timeout)
var url = 'http://bshambaugh.rww.io';
var LDP = $rdf.Namespace('http://www.w3.org/ns/ldp#');
fetcher.nowOrWhenFetched(url, function(ok, body, xhr) {
if (!ok) {
console.log("Oops, something happened and couldn't fetch data");
} else {
var me = $rdf.sym('http://bshambaugh.rww.io');
var contains = LDP('contains')
console.log(contains)
// list every resource this container points to via ldp:contains
var contained = store.each(me, contains, undefined)
for (var i = 0; i < contained.length; i++) {
console.log(contained[i])
}
console.log("I fetched the data");
}
})
Run the code:
node exp6.js
To get the results:
NamedNode { uri: 'http://www.w3.org/ns/ldp#contains' }
NamedNode { uri: 'http://bshambaugh.rww.io/.acl' }
NamedNode { uri: 'http://bshambaugh.rww.io/profile/' }
NamedNode { uri: 'http://bshambaugh.rww.io/storage/' }
I fetched the data
Comments about using rdflib.js to back up and migrate LDP server content
I would like to use rdflib.js to recursively create a backup of a Linked Data Platform (LDP) server (a rough sketch of the traversal follows this list):
- I managed to get the child containers in the sixth experiment with rdflib.js.
- I also need to find any contained RDF files (there may be another term for these in the LDP vocabulary).
- I also need to pull all of the content for each LDP container.
- Then I need to make HTTP requests to an LDP server of my choice,
- and rewrite the LDP server URIs appropriately (to move content from one server to another).
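Here is a rough, untested sketch of the recursive walk, reusing the Fetcher and store.each pattern from the sixth experiment. It only covers the read side; copying to the target server and rewriting URIs are left as a TODO because I have not worked those steps out yet.
var $rdf = require('../rdflib.js');
var LDP = $rdf.Namespace('http://www.w3.org/ns/ldp#');
var store = $rdf.graph();
var fetcher = new $rdf.Fetcher(store, 5000);

// fetch a container, list everything it ldp:contains, and recurse into each child
// (a real version should also track visited URIs to avoid refetching or looping)
function walk(uri, done) {
  fetcher.nowOrWhenFetched(uri, function (ok, body, xhr) {
    if (!ok) { console.log('could not fetch ' + uri); return done(); }
    var children = store.each($rdf.sym(uri), LDP('contains'), undefined);
    var pending = children.length;
    if (pending === 0) { return done(); }
    children.forEach(function (child) {
      console.log('found ' + child.uri);
      // TODO: copy child.uri to the target LDP server and rewrite its URIs here
      walk(child.uri, function () {
        pending = pending - 1;
        if (pending === 0) { done(); }
      });
    });
  });
}

walk('http://bshambaugh.rww.io', function () {
  console.log('finished walking the container tree');
});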
Getting started with rdflib.js -- Second experiment
Create a new file in the rdflibexperiments directory:
vim exp2.js
Add the contents:
var $rdf = require('../rdflib.js');
var uri = 'http://bshambaugh.rww.io/profile/card'
var body = '<a> <b> <c> .'
var mimeType = 'text/turtle'
var store = $rdf.graph()
try {
$rdf.parse(body, store, uri, mimeType)
} catch (err) {
console.log(err)
}
console.log(store);
Run the contents:
first in vim: wq!
second from the prompt: node exp2.js
To get the results: console.log(store) prints the store object, which now holds the single triple parsed from the body.
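One way I could double-check what actually landed in the store, rather than dumping the whole store object, is to list its statements (untested here, not part of the original experiment):
// print every statement in the store; the relative names <a>, <b>, <c>
// have been resolved against the base URI passed to $rdf.parse
var statements = store.statementsMatching(undefined, undefined, undefined);
statements.forEach(function (st) {
  console.log(st.subject.uri, st.predicate.uri, st.object.toString());
});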
Getting started with rdflib.js -- my first experiment
I cloned the github repository with:
git clone https://github.com/linkeddata/rdflib.js.git
then I ran the following steps:
cd rdflib.js
sudo su
npm install
make all
npm install -g jquery
npm install -g xmlhttprequest
I was inspired by the instructions at:
https://github.com/linkeddata/rdflib.js/
At the same level of the directory tree, I created a separate directory named "rdflibexperiments":
cd ..
mkdir rdflibexperiments
cd rdflibexperiments
Then I created a file called "exp1.js":
vim exp1.js
With the contents:
var $rdf = require('../rdflib.js');
var FOAF = $rdf.Namespace("http://xmlns.com/foaf/0.1/");
var foafKnows = FOAF('knows');
console.log(foafKnows);
Running this with the command
node exp1.js
gave
NamedNode { uri: 'http://xmlns.com/foaf/0.1/knows' }
The contents of the file were inspired by:
https://github.com/solid/solid-tutorial-rdflib.js
with a friendly reminder from:
https://github.com/okcoders/ok-coders-summer-2014/blob/master/10-node/10-examples/calculator.js
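A side note I added while writing this up: $rdf.Namespace simply returns a function that appends the local name to the base URI and wraps it as a NamedNode, so FOAF('knows') should give the same node as building the symbol by hand:
var foafKnows = FOAF('knows');
var sameNode = $rdf.sym('http://xmlns.com/foaf/0.1/knows');
console.log(foafKnows.uri === sameNode.uri); // expected: true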
Monday, May 30, 2016
wget and curl on bshambaugh.rww.io
Using curl pulls in the parent container:
curl -i -H "Accept: text/turtle" http://bshambaugh.rww.io
HTTP/1.1 200 OK
Date: Tue, 31 May 2016 01:49:38 GMT
Server: Apache/2.2.22 (Ubuntu)
X-Powered-By: PHP/5.3.10-1ubuntu3.22
Set-Cookie: SID=a0kehvcpo0pctkrlijd1t2fsb0; expires=Sun, 30-May-2021 01:49:38 GMT; path=/; domain=.rww.io
Expires: Thu, 19 Nov 1981 08:52:00 GMT
Cache-Control: max-age=0
Pragma: no-cache
User: dns:107.196.158.16
Set-Cookie: showMetaFiles=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT
Link: <http://bshambaugh.rww.io/.acl>; rel=acl
MS-Author-Via: DAV, SPARQL
Allow: GET, PUT, POST, OPTIONS, HEAD, MKCOL, DELETE, PATCH
Accept-Patch: application/json, application/sparql-update
Accept-Post: text/turtle;charset=utf-8,text/n3;charset=utf-8,text/nt;charset=utf-8,text/css;charset=utf-8,text/html;charset=utf-8,text/javascript;charset=utf-8,text/plain;charset=utf-8,application/rdf+xml;charset=utf-8,application/json;charset=utf-8,multipart/form-data,image/jpeg,image/jpeg,image/png,image/gif,font/otf
Vary: Accept,Origin,If-Modified-Since,If-None-Match
Link: <http://www.w3.org/ns/ldp#BasicContainer>; rel="type"
Link: <http://www.w3.org/ns/ldp#Resource>; rel="type"
Link: <http://bshambaugh.rww.io/.meta>; rel=meta
Link: <http://bshambaugh.rww.io/?p=1>; rel='first'
Link: <http://bshambaugh.rww.io/?p=1>; rel='last'
ETag: "c6d5ed3581b8c61ff70775a7e43a3cb8"
Last-Modified: Tue, 07 Oct 2014 03:06:33 GMT
Updates-Via: ws://bshambaugh.rww.io:81
Triples: 17
Content-Length: 891
Content-Type: text/turtle
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<>
a <http://www.w3.org/ns/ldp#BasicContainer>, <http://www.w3.org/ns/ldp#Container>, <http://www.w3.org/ns/posix/stat#Directory> ;
<http://www.w3.org/ns/ldp#contains> <.acl>, <profile/>, <storage/> ;
<http://www.w3.org/ns/posix/stat#mtime> 1412651193 ;
<http://www.w3.org/ns/posix/stat#size> 4096 .
<.acl>
a <http://www.w3.org/2000/01/rdf-schema#Resource> ;
<http://www.w3.org/ns/posix/stat#mtime> 1412651193 ;
<http://www.w3.org/ns/posix/stat#size> 577 .
<profile/>
a <http://www.w3.org/ns/posix/stat#Directory> ;
<http://www.w3.org/ns/posix/stat#mtime> 1412651156 ;
<http://www.w3.org/ns/posix/stat#size> 4096 .
<storage/>
a <http://www.w3.org/ns/posix/stat#Directory> ;
<http://www.w3.org/ns/posix/stat#mtime> 1422986228 ;
<http://www.w3.org/ns/posix/stat#size> 4096 .
But what about recursively pulling in child containers? A discussion on the Marmotta users list tells me this is not supported by the Linked Data Platform standard.
https://lists.apache.org/thread.html/Zbr0lhfsuf2z183
In any case, I tried this with wget in the same way I would target a normal web server. It does not appear to behave the same way.
wget -mkq -np -nH -D bshambaugh.rww.io http://bshambaugh.rww.io
@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
<>
a <http://www.w3.org/ns/ldp#BasicContainer>, <http://www.w3.org/ns/ldp#Container>, <http://www.w3.org/ns/posix/stat#Directory> ;
<http://www.w3.org/ns/ldp#contains> <.acl>, <profile/>, <storage/> ;
<http://www.w3.org/ns/posix/stat#mtime> 1412651193 ;
<http://www.w3.org/ns/posix/stat#size> 4096 .
<.acl>
a <http://www.w3.org/2000/01/rdf-schema#Resource> ;
<http://www.w3.org/ns/posix/stat#mtime> 1412651193 ;
<http://www.w3.org/ns/posix/stat#size> 577 .
<profile/>
a <http://www.w3.org/ns/posix/stat#Directory> ;
<http://www.w3.org/ns/posix/stat#mtime> 1412651156 ;
<http://www.w3.org/ns/posix/stat#size> 4096 .
<storage/>
a <http://www.w3.org/ns/posix/stat#Directory> ;
<http://www.w3.org/ns/posix/stat#mtime> 1422986228 ;
<http://www.w3.org/ns/posix/stat#size> 4096 .