diff --git a/.github/workflows/build11.yml b/.github/workflows/build11.yml new file mode 100644 index 00000000000..a0a0012d20d --- /dev/null +++ b/.github/workflows/build11.yml @@ -0,0 +1,14 @@ +name: succed job 'build (11)' + +on: pull_request + +jobs: + build: + strategy: + fail-fast: true + matrix: + jdk: [ 11 ] + runs-on: ubuntu-latest + steps: + - name: Always Succeed + run: true \ No newline at end of file diff --git a/.github/workflows/develop-status.yml b/.github/workflows/develop-status.yml index cb782abfc29..32d63b0a6f9 100644 --- a/.github/workflows/develop-status.yml +++ b/.github/workflows/develop-status.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - jdk: [11, 17] + jdk: [17] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/main-status.yml b/.github/workflows/main-status.yml index ba8fd33f39c..5fdf7c63427 100644 --- a/.github/workflows/main-status.yml +++ b/.github/workflows/main-status.yml @@ -11,7 +11,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - jdk: [11, 17] + jdk: [17] steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/pr-verify.yml b/.github/workflows/pr-verify.yml index 29edc6b64e3..6648a34ee5a 100644 --- a/.github/workflows/pr-verify.yml +++ b/.github/workflows/pr-verify.yml @@ -19,7 +19,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: 11 + java-version: 17 distribution: 'temurin' cache: maven - name: Check formatting @@ -37,7 +37,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: 11 + java-version: 17 distribution: 'temurin' cache: maven - name: Compile (mvn clean install) @@ -50,7 +50,7 @@ jobs: strategy: fail-fast: true matrix: - jdk: [ 11, 25 ] + jdk: [ 17, 25 ] steps: - uses: actions/checkout@v4 - name: Set up JDK @@ -78,7 +78,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: 11 + java-version: 17 distribution: 'temurin' cache: maven - name: Build @@ -99,7 +99,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: 11 + java-version: 17 distribution: 'temurin' cache: maven - name: Build @@ -120,7 +120,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: 11 + java-version: 17 distribution: 'temurin' cache: maven - name: Run install @@ -137,7 +137,7 @@ jobs: - name: Set up JDK uses: actions/setup-java@v4 with: - java-version: 11 + java-version: 17 distribution: 'temurin' cache: maven - name: Install dependencies diff --git a/pom.xml b/pom.xml index 06c64e89308..1748eb7e057 100644 --- a/pom.xml +++ b/pom.xml @@ -42,6 +42,7 @@ core tools spring-components + spring6-components testsuites compliance examples @@ -806,6 +807,22 @@ solr-core ${solr.version} + + + javax.validation + validation-api + 2.0.1.Final + + + javax.annotation + javax.annotation-api + 1.3.2 + + + ch.qos.logback + logback-core + ${logback.version} + @@ -1039,6 +1056,8 @@ javax.servlet:jstl:*:*:provided javax.servlet:javax.servlet-api:*:*:test + + javax.*:*:*:*:provided @@ -1061,6 +1080,8 @@ org.opentest4j.* + + provided true @@ -1072,7 +1093,7 @@ org.codehaus.mojo extra-enforcer-rules - 1.6.1 + 1.11.0 diff --git a/site/content/documentation/programming/spring.md b/site/content/documentation/programming/spring.md index b6b6b99c764..35a1a2c18d4 100644 --- a/site/content/documentation/programming/spring.md +++ b/site/content/documentation/programming/spring.md @@ -17,11 +17,11 @@ To use RDF as the data backend of a spring application built with maven, use the ```xml org.eclipse.rdf4j - rdf4j-spring + 
rdf4j-spring6 ${rdf4j.version} ``` -... setting the property `rdf4j.version` is set to the RDF4J version you want (minimum `4.0.0`). +... setting the property `rdf4j.version` is set to the RDF4J version you want (minimum `5.3.0`). In order for the application to run, a repository has to be configured: diff --git a/spring-components/rdf4j-spring-demo/pom.xml b/spring-components/rdf4j-spring-demo/pom.xml index 011ac7048d6..192ad0d30ce 100644 --- a/spring-components/rdf4j-spring-demo/pom.xml +++ b/spring-components/rdf4j-spring-demo/pom.xml @@ -36,6 +36,12 @@ + + javax.annotation + javax.annotation-api + 1.3.2 + provided + diff --git a/spring-components/rdf4j-spring/pom.xml b/spring-components/rdf4j-spring/pom.xml index 8b422bc394d..13aa8666311 100644 --- a/spring-components/rdf4j-spring/pom.xml +++ b/spring-components/rdf4j-spring/pom.xml @@ -80,6 +80,13 @@ mockserver-junit-jupiter-no-dependencies test + + javax.validation + validation-api + 2.0.1.Final + provided + + diff --git a/spring6-components/pom.xml b/spring6-components/pom.xml new file mode 100644 index 00000000000..bca4a50b348 --- /dev/null +++ b/spring6-components/pom.xml @@ -0,0 +1,69 @@ + + + 4.0.0 + + org.eclipse.rdf4j + rdf4j + 5.3.0-SNAPSHOT + + pom + + spring6-boot-sparql-web + rdf4j-spring6 + rdf4j-spring6-demo + + + + 17 + 3.5.7 + + 6.2.12 + 2.0.17 + 1.5.20 + 2.24.3 + 12.0.29 + + rdf4j-spring6-components + RDF4J: Spring6 components + Components to use with Spring + + + + org.junit + junit-bom + ${junit.version} + pom + import + + + org.springframework.boot + spring-boot-dependencies + ${spring.boot.version} + pom + import + + + + + + org.springframework + spring-test + test + + + org.springframework + spring-jcl + + + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring.boot.version} + + + + diff --git a/spring6-components/rdf4j-spring6-demo/README.md b/spring6-components/rdf4j-spring6-demo/README.md new file mode 100644 index 00000000000..a25c631f137 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/README.md @@ -0,0 +1,14 @@ +# RDF4J-Spring Demo + +Small demo application for `rdf4j-spring`. + +The purpose of `rdf4j-spring` is to use an RDF4J repository as the data backend of a spring or spring boot application. + +To run the demo, do + +```$bash +mvn spring-boot:run +``` + +The program writes to stdout and exits. The class [ArtDemoCli](src/main/java/org/eclipse/rdf4j/spring.demo/ArtDemoCli.java) is a good starting point for looking at the code. 
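For a quick look at the pattern the demo follows — a Spring Boot `CommandLineRunner` that autowires the demo's `ArtService` and prints the paintings grouped by artist — here is a minimal sketch. It condenses what the `ArtDemoCli` class added below does; the class name `DemoSketch` is illustrative only, and the sketch assumes the demo's `application.properties` (which enables the in-memory repository via `rdf4j.spring.repository.inmemory.enabled=true`) and `artists.ttl` are on the classpath, as added elsewhere in this change.

```java
package org.eclipse.rdf4j.spring.demo;

import java.util.Map;
import java.util.Set;

import org.eclipse.rdf4j.spring.demo.model.Artist;
import org.eclipse.rdf4j.spring.demo.model.Painting;
import org.eclipse.rdf4j.spring.demo.service.ArtService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Illustrative class name; the real entry point added in this PR is ArtDemoCli.
@SpringBootApplication
public class DemoSketch implements CommandLineRunner {

    // ArtService (added in this PR) wraps access to ArtistDao and PaintingDao
    @Autowired
    ArtService artService;

    public static void main(String[] args) {
        // run once and exit, like ArtDemoCli
        SpringApplication.run(DemoSketch.class, args).close();
    }

    @Override
    public void run(String... args) {
        // paintings are loaded from artists.ttl at startup; group them by artist and print a summary
        Map<Artist, Set<Painting>> byArtist = artService.getPaintingsGroupedByArtist();
        byArtist.forEach((artist, paintings) -> System.out.printf(
                "%s %s: %d painting(s)%n",
                artist.getFirstName(), artist.getLastName(), paintings.size()));
    }
}
```

Run with `mvn spring-boot:run`: the program prints one line per artist to stdout and then exits, mirroring the behaviour described in the README above.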
+ diff --git a/spring6-components/rdf4j-spring6-demo/pom.xml b/spring6-components/rdf4j-spring6-demo/pom.xml new file mode 100644 index 00000000000..697e62e0e60 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/pom.xml @@ -0,0 +1,58 @@ + + + 4.0.0 + rdf4j-spring6-demo + RDF4J: Spring6 Demo + Demo of a spring-boot project using an RDF4J repo as its backend + + org.eclipse.rdf4j + rdf4j-spring6-components + 5.3.0-SNAPSHOT + + + + org.eclipse.rdf4j + rdf4j-spring6 + ${project.version} + + + org.springframework.boot + spring-boot-starter + + + org.springframework + spring-jcl + + + + + org.springframework.boot + spring-boot-starter-test + test + + + + + + org.springframework.boot + spring-boot-dependencies + ${spring.boot.version} + pom + import + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + + org.springframework.boot + spring-boot-maven-plugin + ${spring.boot.version} + + + + diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/ArtDemoCli.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/ArtDemoCli.java new file mode 100644 index 00000000000..537f39b0535 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/ArtDemoCli.java @@ -0,0 +1,93 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo; + +import java.util.Map; +import java.util.Set; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.demo.model.Artist; +import org.eclipse.rdf4j.spring.demo.model.Painting; +import org.eclipse.rdf4j.spring.demo.service.ArtService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.CommandLineRunner; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; + +/** + * Command line interface for the demo. Takes no parameters. It outputs the content of the demo repository, then adds + * some data and outputs the content of the repository again. + *
+ * Accessing the repository is done via the {@link ArtService} class, which just encapsulates accesses to the + * {@link org.eclipse.rdf4j.spring.demo.dao.PaintingDao} and {@link org.eclipse.rdf4j.spring.demo.dao.ArtistDao} + * classes. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@SpringBootApplication +public class ArtDemoCli implements CommandLineRunner { + @Autowired + ArtService artService; + + public static void main(String[] args) { + SpringApplication.run(ArtDemoCli.class, args).close(); + } + + @Override + public void run(String... args) { + System.out.println("\nData read from 'artists.ttl':"); + Map> paintingsMap = artService.getPaintingsGroupedByArtist(); + listPaintingsByArtist(paintingsMap); + System.out.println("\nNow adding some data..."); + addPaintingWithArtist(); + System.out.println("\nReloaded data:"); + paintingsMap = artService.getPaintingsGroupedByArtist(); + listPaintingsByArtist(paintingsMap); + System.out.println("\n"); + listArtistsWithoutPaintings(); + System.out.println("\n"); + } + + private void addPaintingWithArtist() { + Artist a = new Artist(); + a.setFirstName("Jan"); + a.setLastName("Vermeer"); + IRI artistId = artService.addArtist(a); + Painting p = new Painting(); + p.setTitle("View of Delft"); + p.setTechnique("oil on canvas"); + p.setArtistId(artistId); + artService.addPainting(p); + } + + private void listPaintingsByArtist(Map> paintingsMap) { + for (Artist a : paintingsMap.keySet()) { + System.out.println(String.format("%s %s", a.getFirstName(), a.getLastName())); + for (Painting p : paintingsMap.get(a)) { + System.out.println(String.format("\t%s (%s)", p.getTitle(), p.getTechnique())); + } + } + } + + private void listArtistsWithoutPaintings() { + System.out.println("Artists without paintings:"); + Set a = artService.getArtistsWithoutPaintings(); + if (a.isEmpty()) { + System.out.println("\t[none]"); + } else { + for (Artist artist : a) { + System.out.println(String.format("%s %s", artist.getFirstName(), artist.getLastName())); + } + } + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/ArtDemoConfig.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/ArtDemoConfig.java new file mode 100644 index 00000000000..d9dda6ca27d --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/ArtDemoConfig.java @@ -0,0 +1,58 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo; + +import org.eclipse.rdf4j.spring.RDF4JConfig; +import org.eclipse.rdf4j.spring.dao.RDF4JDao; +import org.eclipse.rdf4j.spring.demo.support.InitialDataInserter; +import org.eclipse.rdf4j.spring.support.DataInserter; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.*; +import org.springframework.core.io.Resource; + +/** + * Spring config for the demo. + *
+ * Here is what it does:
+ * <ul>
+ * <li>it imports {@link RDF4JConfig}, which interprets the config properties (in our example, they are in
+ * application.properties) and registers a number of beans</li>
+ * <li>it scans the org.eclipse.rdf4j.spring.demo.dao package, finds the DAOs, registers them as beans and
+ * injects their dependencies</li>
+ * <li>it configures the 'data inserter' beans, which read data from the 'artists.ttl' file and add it to the
+ * repository at startup</li>
+ * </ul>
+ * <p>
+ * See {@link org.eclipse.rdf4j.spring Rdf4J-Spring} for an overview and more pointers. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Configuration +@Import(RDF4JConfig.class) +@ComponentScan( + value = "org.eclipse.rdf4j.spring.demo.dao", includeFilters = @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = RDF4JDao.class)) +public class ArtDemoConfig { + @Bean + public DataInserter getDataInserter() { + return new DataInserter(); + } + + @Bean + public InitialDataInserter getInitialDataInserter( + @Autowired DataInserter dataInserter, + @Value("classpath:/artists.ttl") Resource ttlFile) { + return new InitialDataInserter(dataInserter, ttlFile); + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/dao/ArtistDao.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/dao/ArtistDao.java new file mode 100644 index 00000000000..39c8c1b5209 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/dao/ArtistDao.java @@ -0,0 +1,127 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.dao; + +import static org.eclipse.rdf4j.sparqlbuilder.constraint.Expressions.bound; +import static org.eclipse.rdf4j.sparqlbuilder.constraint.Expressions.not; +import static org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf.iri; +import static org.eclipse.rdf4j.spring.demo.model.Artist.ARTIST_FIRST_NAME; +import static org.eclipse.rdf4j.spring.demo.model.Artist.ARTIST_ID; +import static org.eclipse.rdf4j.spring.demo.model.Artist.ARTIST_LAST_NAME; + +import java.util.Set; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.FOAF; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries; +import org.eclipse.rdf4j.spring.dao.SimpleRDF4JCRUDDao; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.MutableBindings; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.demo.model.Artist; +import org.eclipse.rdf4j.spring.demo.model.EX; +import org.eclipse.rdf4j.spring.demo.model.Painting; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.springframework.stereotype.Component; + +/** + * Class responsible for repository access for managing {@link Artist} entities. + *
+ * The class extends the {@link SimpleRDF4JCRUDDao}, providing capabilities for inserting and reading entities. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Component +public class ArtistDao extends SimpleRDF4JCRUDDao { + + public ArtistDao(RDF4JTemplate rdf4JTemplate) { + super(rdf4JTemplate); + } + + @Override + protected void populateIdBindings(MutableBindings bindingsBuilder, IRI iri) { + bindingsBuilder.add(ARTIST_ID, iri); + } + + @Override + protected void populateBindingsForUpdate(MutableBindings bindingsBuilder, Artist artist) { + bindingsBuilder + .add(ARTIST_FIRST_NAME, artist.getFirstName()) + .add(ARTIST_LAST_NAME, artist.getLastName()); + } + + @Override + protected Artist mapSolution(BindingSet querySolution) { + Artist artist = new Artist(); + artist.setId(QueryResultUtils.getIRI(querySolution, ARTIST_ID)); + artist.setFirstName(QueryResultUtils.getString(querySolution, ARTIST_FIRST_NAME)); + artist.setLastName(QueryResultUtils.getString(querySolution, ARTIST_LAST_NAME)); + return artist; + } + + @Override + protected String getReadQuery() { + return "prefix foaf: " + + "prefix ex: " + + "SELECT ?artist_id ?artist_firstName ?artist_lastName where {" + + "?artist_id a ex:Artist; " + + " foaf:firstName ?artist_firstName; " + + " foaf:surname ?artist_lastName ." + + " } "; + } + + @Override + protected NamedSparqlSupplier getInsertSparql(Artist artist) { + return NamedSparqlSupplier.of("insert", () -> Queries.INSERT(ARTIST_ID.isA(iri(EX.Artist)) + .andHas(iri(FOAF.FIRST_NAME), ARTIST_FIRST_NAME) + .andHas(iri(FOAF.SURNAME), ARTIST_LAST_NAME)) + .getQueryString()); + } + + @Override + protected IRI getInputId(Artist artist) { + if (artist.getId() == null) { + return getRdf4JTemplate().getNewUUID(); + } + return artist.getId(); + } + + static abstract class QUERY_KEYS { + public static final String ARTISTS_WITHOUT_PAINTINGS = "artists-without-paintings"; + } + + @Override + protected NamedSparqlSupplierPreparer prepareNamedSparqlSuppliers(NamedSparqlSupplierPreparer preparer) { + return preparer.forKey(QUERY_KEYS.ARTISTS_WITHOUT_PAINTINGS) + .supplySparql(Queries.SELECT( + ARTIST_ID) + .where( + ARTIST_ID.isA(iri(EX.Artist)) + .and(ARTIST_ID.has(iri(EX.creatorOf), Painting.PAINTING_ID).optional()) + .filter(not(bound(Painting.PAINTING_ID)))) + .getQueryString() + ); + } + + public Set getArtistsWithoutPaintings() { + return getNamedTupleQuery(QUERY_KEYS.ARTISTS_WITHOUT_PAINTINGS) + .evaluateAndConvert() + .toStream() + .map(bs -> QueryResultUtils.getIRI(bs, ARTIST_ID)) + .map(this::getById) + .collect(Collectors.toSet()); + } + +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/dao/PaintingDao.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/dao/PaintingDao.java new file mode 100644 index 00000000000..14c044fb745 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/dao/PaintingDao.java @@ -0,0 +1,105 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.dao; + +import static org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf.iri; +import static org.eclipse.rdf4j.spring.demo.model.Painting.PAINTING_ARTIST_ID; +import static org.eclipse.rdf4j.spring.demo.model.Painting.PAINTING_ID; +import static org.eclipse.rdf4j.spring.demo.model.Painting.PAINTING_LABEL; +import static org.eclipse.rdf4j.spring.demo.model.Painting.PAINTING_TECHNIQUE; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries; +import org.eclipse.rdf4j.spring.dao.RDF4JDao; +import org.eclipse.rdf4j.spring.dao.SimpleRDF4JCRUDDao; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.MutableBindings; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.demo.model.EX; +import org.eclipse.rdf4j.spring.demo.model.Painting; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.springframework.stereotype.Component; + +/** + * Class responsible for repository access for managing {@link Painting} entities. + *
+ * The class extends the {@link SimpleRDF4JCRUDDao}, providing capabilities for inserting and reading entities. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Component +public class PaintingDao extends SimpleRDF4JCRUDDao { + + public PaintingDao(RDF4JTemplate rdf4JTemplate) { + super(rdf4JTemplate); + } + + @Override + protected void populateIdBindings(MutableBindings bindingsBuilder, IRI iri) { + bindingsBuilder.add(PAINTING_ID, iri); + } + + @Override + protected RDF4JDao.NamedSparqlSupplierPreparer prepareNamedSparqlSuppliers(NamedSparqlSupplierPreparer preparer) { + return null; + } + + @Override + protected Painting mapSolution(BindingSet querySolution) { + Painting painting = new Painting(); + painting.setId(QueryResultUtils.getIRI(querySolution, PAINTING_ID)); + painting.setTechnique(QueryResultUtils.getString(querySolution, PAINTING_TECHNIQUE)); + painting.setTitle(QueryResultUtils.getString(querySolution, PAINTING_LABEL)); + painting.setArtistId(QueryResultUtils.getIRI(querySolution, PAINTING_ARTIST_ID)); + return painting; + } + + @Override + protected String getReadQuery() { + return Queries.SELECT(PAINTING_ID, PAINTING_LABEL, PAINTING_TECHNIQUE, PAINTING_ARTIST_ID) + .where( + PAINTING_ID.isA(iri(EX.Painting)) + .andHas(iri(EX.technique), PAINTING_TECHNIQUE) + .andHas(iri(RDFS.LABEL), PAINTING_LABEL), + PAINTING_ARTIST_ID.has(iri(EX.creatorOf), PAINTING_ID)) + .getQueryString(); + } + + @Override + protected NamedSparqlSupplier getInsertSparql(Painting painting) { + return NamedSparqlSupplier.of("insert", () -> Queries.INSERT( + PAINTING_ID.isA(iri(EX.Painting)) + .andHas(iri(EX.technique), PAINTING_TECHNIQUE) + .andHas(iri(RDFS.LABEL), PAINTING_LABEL), + PAINTING_ARTIST_ID.has(iri(EX.creatorOf), PAINTING_ID)) + .getQueryString()); + } + + @Override + protected void populateBindingsForUpdate(MutableBindings bindingsBuilder, Painting painting) { + bindingsBuilder + .add(PAINTING_LABEL, painting.getTitle()) + .add(PAINTING_TECHNIQUE, painting.getTechnique()) + .add(PAINTING_ARTIST_ID, painting.getArtistId()); + } + + @Override + protected IRI getInputId(Painting painting) { + if (painting.getId() == null) { + return getRdf4JTemplate().getNewUUID(); + } + return painting.getId(); + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/Artist.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/Artist.java new file mode 100644 index 00000000000..5946a71f998 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/Artist.java @@ -0,0 +1,72 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.model; + +import java.util.Objects; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class Artist { + public static final Variable ARTIST_ID = SparqlBuilder.var("artist_id"); + public static final Variable ARTIST_FIRST_NAME = SparqlBuilder.var("artist_firstName"); + public static final Variable ARTIST_LAST_NAME = SparqlBuilder.var("artist_lastName"); + private IRI id; + private String firstName; + private String lastName; + + public String getFirstName() { + return firstName; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public String getLastName() { + return lastName; + } + + public void setLastName(String lastName) { + this.lastName = lastName; + } + + public IRI getId() { + return id; + } + + public void setId(IRI id) { + this.id = id; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Artist artist = (Artist) o; + return Objects.equals(id, artist.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/EX.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/EX.java new file mode 100644 index 00000000000..2102af232f6 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/EX.java @@ -0,0 +1,44 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.model; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Namespace; +import org.eclipse.rdf4j.model.impl.SimpleNamespace; +import org.eclipse.rdf4j.model.util.Values; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class EX { + private static final Namespace base = new SimpleNamespace("ex", "http://example.org/"); + public static final IRI Artist = Values.iri(base, "Artist"); + public static final IRI Gallery = Values.iri(base, "Gallery"); + public static final IRI Painting = Values.iri(base, "Painting"); + public static final IRI Picasso = Values.iri(base, "Picasso"); + public static final IRI VanGogh = Values.iri(base, "VanGogh"); + public static final IRI Rembrandt = Values.iri(base, "Rembrandt"); + public static final IRI street = Values.iri(base, "street"); + public static final IRI city = Values.iri(base, "city"); + public static final IRI country = Values.iri(base, "country"); + public static final IRI creatorOf = Values.iri(base, "creatorOf"); + public static final IRI technique = Values.iri(base, "technique"); + public static final IRI starryNight = Values.iri(base, "starryNight"); + public static final IRI sunflowers = Values.iri(base, "sunflowers"); + public static final IRI potatoEaters = Values.iri(base, "potatoEaters"); + public static final IRI guernica = Values.iri(base, "guernica"); + + public static IRI of(String localName) { + return Values.iri(base, localName); + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/Painting.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/Painting.java new file mode 100644 index 00000000000..a81efc79002 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/model/Painting.java @@ -0,0 +1,83 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.model; + +import java.util.Objects; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class Painting { + public static final Variable PAINTING_ID = SparqlBuilder.var("painting_id"); + public static final Variable PAINTING_ARTIST_ID = SparqlBuilder.var("painting_artist_id"); + public static final Variable PAINTING_TECHNIQUE = SparqlBuilder.var("painting_technique"); + public static final Variable PAINTING_LABEL = SparqlBuilder.var("painting_label"); + + private IRI id; + private String title; + private String technique; + private IRI artistId; + + public IRI getId() { + return id; + } + + public void setId(IRI id) { + this.id = id; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getTechnique() { + return technique; + } + + public void setTechnique(String technique) { + this.technique = technique; + } + + public IRI getArtistId() { + return artistId; + } + + public void setArtistId(IRI artistId) { + this.artistId = artistId; + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Painting painting = (Painting) o; + return Objects.equals(id, painting.id); + } + + @Override + public int hashCode() { + return Objects.hash(id); + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/service/ArtService.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/service/ArtService.java new file mode 100644 index 00000000000..75972e6c2f3 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/service/ArtService.java @@ -0,0 +1,96 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.service; + +import static java.util.stream.Collectors.groupingBy; +import static java.util.stream.Collectors.toSet; + +import java.util.List; +import java.util.Map; +import java.util.Set; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.demo.dao.ArtistDao; +import org.eclipse.rdf4j.spring.demo.dao.PaintingDao; +import org.eclipse.rdf4j.spring.demo.model.Artist; +import org.eclipse.rdf4j.spring.demo.model.Painting; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Transactional; + +/** + * Uses {@link ArtistDao} and {@link PaintingDao} to query and manipulate the repository. 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Component +public class ArtService { + @Autowired + private ArtistDao artistDao; + + @Autowired + private PaintingDao paintingDao; + + @Transactional + public Artist createArtist(String firstName, String lastName) { + Artist artist = new Artist(); + artist.setFirstName(firstName); + artist.setLastName(lastName); + return artistDao.save(artist); + } + + @Transactional + public Painting createPainting(String title, String technique, IRI artist) { + Painting painting = new Painting(); + painting.setTitle(title); + painting.setTechnique(technique); + painting.setArtistId(artist); + return paintingDao.save(painting); + } + + @Transactional + public List getPaintings() { + return paintingDao.list(); + } + + @Transactional + public List getArtists() { + return artistDao.list(); + } + + @Transactional + public Set getArtistsWithoutPaintings() { + return artistDao.getArtistsWithoutPaintings(); + } + + @Transactional + public Map> getPaintingsGroupedByArtist() { + List paintings = paintingDao.list(); + return paintings + .stream() + .collect(groupingBy( + p -> artistDao.getById(p.getArtistId()), + toSet())); + } + + @Transactional + public IRI addArtist(Artist artist) { + return artistDao.saveAndReturnId(artist); + } + + @Transactional + public IRI addPainting(Painting painting) { + return paintingDao.saveAndReturnId(painting); + } + +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/support/InitialDataInserter.java b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/support/InitialDataInserter.java new file mode 100644 index 00000000000..6fe04205eb1 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/java/org/eclipse/rdf4j/spring/demo/support/InitialDataInserter.java @@ -0,0 +1,38 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.support; + +import org.eclipse.rdf4j.spring.support.DataInserter; +import org.springframework.core.io.Resource; + +import jakarta.annotation.PostConstruct; + +/** + * Inserts data from the specified TTL file into the repository at startup. 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class InitialDataInserter { + DataInserter dataInserter; + Resource ttlFile; + + public InitialDataInserter(DataInserter dataInserter, Resource ttlFile) { + this.dataInserter = dataInserter; + this.ttlFile = ttlFile; + } + + @PostConstruct + public void insertDemoData() { + this.dataInserter.insertData(ttlFile); + } +} diff --git a/spring6-components/rdf4j-spring6-demo/src/main/resources/application.properties b/spring6-components/rdf4j-spring6-demo/src/main/resources/application.properties new file mode 100644 index 00000000000..fbaab9a326b --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/resources/application.properties @@ -0,0 +1,6 @@ +rdf4j.spring.repository.inmemory.enabled=true +rdf4j.spring.pool.enabled=true +rdf4j.spring.operationcache.enabled=false +rdf4j.spring.operationlog.enabled=false +rdf4j.spring.resultcache.enabled=false +rdf4j.spring.tx.enabled=true \ No newline at end of file diff --git a/spring6-components/rdf4j-spring6-demo/src/main/resources/artists.ttl b/spring6-components/rdf4j-spring6-demo/src/main/resources/artists.ttl new file mode 100644 index 00000000000..041b4f9597c --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/resources/artists.ttl @@ -0,0 +1,38 @@ +@prefix ex: . +@prefix foaf: . +@prefix rdfs: . + +ex:Picasso a ex:Artist ; + foaf:firstName "Pablo" ; + foaf:surname "Picasso"; + ex:creatorOf ex:guernica ; + ex:homeAddress _:node1 . + +_:node1 ex:street "31 Art Gallery" ; + ex:city "Madrid" ; + ex:country "Spain" . + +ex:guernica a ex:Painting ; + rdfs:label "Guernica"; + ex:technique "oil on canvas". + +ex:VanGogh a ex:Artist ; + foaf:firstName "Vincent" ; + foaf:surname "van Gogh"; + ex:creatorOf ex:starryNight, ex:sunflowers, ex:potatoEaters . + +ex:starryNight a ex:Painting ; + ex:technique "oil on canvas"; + rdfs:label "Starry Night" . + +ex:sunflowers a ex:Painting ; + ex:technique "oil on canvas"; + rdfs:label "Sunflowers" . + +ex:potatoEaters a ex:Painting ; + ex:technique "oil on canvas"; + rdfs:label "The Potato Eaters" . + +ex:Rembrandt a ex:Artist ; + foaf:firstName "Rembrandt Harmensz" ; + foaf:surname "van Rijn". \ No newline at end of file diff --git a/spring6-components/rdf4j-spring6-demo/src/main/resources/logback.xml b/spring6-components/rdf4j-spring6-demo/src/main/resources/logback.xml new file mode 100644 index 00000000000..cc3c5b4b7b3 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/main/resources/logback.xml @@ -0,0 +1,23 @@ + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + diff --git a/spring6-components/rdf4j-spring6-demo/src/test/java/org/eclipse/rdf4j/spring/demo/TestConfig.java b/spring6-components/rdf4j-spring6-demo/src/test/java/org/eclipse/rdf4j/spring/demo/TestConfig.java new file mode 100644 index 00000000000..4c857e2a395 --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/test/java/org/eclipse/rdf4j/spring/demo/TestConfig.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo; + +import org.eclipse.rdf4j.spring.support.DataInserter; +import org.eclipse.rdf4j.spring.test.RDF4JTestConfig; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Import; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +@TestConfiguration +@EnableTransactionManagement +@Import(RDF4JTestConfig.class) +@ComponentScan("org.eclipse.rdf4j.spring.demo.*") +public class TestConfig { + + @Bean + DataInserter getDataInserter() { + return new DataInserter(); + } + +} diff --git a/spring6-components/rdf4j-spring6-demo/src/test/java/org/eclipse/rdf4j/spring/demo/dao/ArtistDaoTests.java b/spring6-components/rdf4j-spring6-demo/src/test/java/org/eclipse/rdf4j/spring/demo/dao/ArtistDaoTests.java new file mode 100644 index 00000000000..0018facbcfb --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/test/java/org/eclipse/rdf4j/spring/demo/dao/ArtistDaoTests.java @@ -0,0 +1,83 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.demo.dao; + +import java.util.Set; + +import org.eclipse.rdf4j.spring.demo.TestConfig; +import org.eclipse.rdf4j.spring.demo.model.Artist; +import org.eclipse.rdf4j.spring.demo.model.EX; +import org.eclipse.rdf4j.spring.support.DataInserter; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.io.Resource; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.annotation.Transactional; + +@Transactional +@SpringJUnitConfig(classes = { TestConfig.class }) +@TestPropertySource("classpath:application.properties") +@TestPropertySource( + properties = { + "rdf4j.spring.repository.inmemory.enabled=true", + "rdf4j.spring.repository.inmemory.use-shacl-sail=true", + "rdf4j.spring.tx.enabled=true", + "rdf4j.spring.resultcache.enabled=false", + "rdf4j.spring.operationcache.enabled=false", + "rdf4j.spring.pool.enabled=true", + "rdf4j.spring.pool.max-connections=2" + }) +@DirtiesContext +public class ArtistDaoTests { + + @Autowired + private ArtistDao artistDao; + + @BeforeAll + public static void insertTestData( + @Autowired DataInserter dataInserter, + @Value("classpath:artists.ttl") Resource dataFile) { + dataInserter.insertData(dataFile); + } + + @Test + public void testReadArtist() { + Artist a = artistDao.getById(EX.Picasso); + Assertions.assertEquals("Picasso", a.getLastName()); + 
Assertions.assertEquals("Pablo", a.getFirstName()); + } + + @Test + public void testWriteArtist() { + Artist a = new Artist(); + a.setFirstName("Salvador"); + a.setLastName("DalĂ­"); + Artist savedDali = artistDao.save(a); + Assertions.assertNotNull(savedDali.getId()); + Artist reloadedDali = artistDao.getById(savedDali.getId()); + Assertions.assertEquals(savedDali, reloadedDali); + } + + @Test + public void testReadArtistWithoutPaintings() { + Set withoutPaintings = artistDao.getArtistsWithoutPaintings(); + Assertions.assertEquals(1, withoutPaintings.size()); + Artist a = artistDao.getById(EX.Rembrandt); + Assertions.assertTrue(withoutPaintings.contains(a)); + } + +} diff --git a/spring6-components/rdf4j-spring6-demo/src/test/resources/logback.xml b/spring6-components/rdf4j-spring6-demo/src/test/resources/logback.xml new file mode 100644 index 00000000000..2c07d55c40c --- /dev/null +++ b/spring6-components/rdf4j-spring6-demo/src/test/resources/logback.xml @@ -0,0 +1,13 @@ + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + diff --git a/spring6-components/rdf4j-spring6/pom.xml b/spring6-components/rdf4j-spring6/pom.xml new file mode 100644 index 00000000000..34d17e19235 --- /dev/null +++ b/spring6-components/rdf4j-spring6/pom.xml @@ -0,0 +1,89 @@ + + + 4.0.0 + + org.eclipse.rdf4j + rdf4j-spring6-components + 5.3.0-SNAPSHOT + + rdf4j-spring6 + RDF4J: Spring6 + Spring6 integration for RDF4J + jar + + + + org.eclipse.rdf4j + rdf4j-runtime + ${project.version} + pom + + + org.eclipse.rdf4j + rdf4j-sparqlbuilder + ${project.version} + + + + org.springframework.boot + spring-boot-starter-validation + + + org.springframework.boot + spring-boot-starter-web + + + org.springframework.boot + spring-boot-starter-tomcat + + + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework + spring-jcl + + + + + com.google.guava + guava + + + org.springframework.boot + spring-boot-configuration-processor + true + + + org.springframework + spring-tx + + + org.hibernate.validator + hibernate-validator + + + org.apache.commons + commons-pool2 + 2.8.1 + + + org.mock-server + mockserver-junit-jupiter-no-dependencies + test + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring.boot.version} + + + + diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/RDF4JConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/RDF4JConfig.java new file mode 100644 index 00000000000..e8e73500fa0 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/RDF4JConfig.java @@ -0,0 +1,135 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring; + +import java.lang.invoke.MethodHandles; + +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.operationcache.CachingOperationInstantiator; +import org.eclipse.rdf4j.spring.operationcache.OperationCacheProperties; +import org.eclipse.rdf4j.spring.operationlog.LoggingRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.eclipse.rdf4j.spring.pool.PoolProperties; +import org.eclipse.rdf4j.spring.pool.PooledRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.resultcache.CachingRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.resultcache.ResultCacheProperties; +import org.eclipse.rdf4j.spring.support.DirectOperationInstantiator; +import org.eclipse.rdf4j.spring.support.OperationInstantiator; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.support.UUIDSource; +import org.eclipse.rdf4j.spring.support.connectionfactory.DirectRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.tx.TransactionalRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.tx.TxProperties; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.core.io.ResourceLoader; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +@Configuration +@EnableTransactionManagement +public class RDF4JConfig { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Bean + RDF4JTemplate getRdf4JTemplate(@Autowired RepositoryConnectionFactory repositoryConnectionFactory, + @Autowired(required = false) OperationCacheProperties operationCacheProperties, + @Autowired ResourceLoader resourceLoader, + @Autowired(required = false) UUIDSource uuidSource) { + OperationInstantiator operationInstantiator; + if (operationCacheProperties != null && operationCacheProperties.isEnabled()) { + logger.debug("Operation caching is enabled"); + operationInstantiator = new CachingOperationInstantiator(); + } else { + logger.debug("Operation caching is not enabled"); + operationInstantiator = new DirectOperationInstantiator(); + } + return new RDF4JTemplate(repositoryConnectionFactory, operationInstantiator, resourceLoader, uuidSource); + } + + @Bean + RepositoryConnectionFactory getRepositoryConnectionFactory( + @Autowired Repository repository, + @Autowired(required = false) PoolProperties poolProperties, + @Autowired(required = false) ResultCacheProperties resultCacheProperties, + @Autowired(required = false) OperationLog operationLog, + @Autowired(required = false) TxProperties txProperties) { + RepositoryConnectionFactory factory = getDirectRepositoryConnectionFactory(repository); + + if (poolProperties != null && poolProperties.isEnabled()) { + logger.debug("Connection pooling is enabled"); + factory = 
wrapWithPooledRepositoryConnectionFactory(factory, poolProperties); + } else { + logger.debug("Connection pooling is not enabled"); + } + if (resultCacheProperties != null && resultCacheProperties.isEnabled()) { + factory = wrapWithCachingRepositoryConnectionFactory(factory, resultCacheProperties); + logger.debug("Result caching is enabled"); + } else { + logger.debug("Result caching is not enabled"); + } + if (operationLog != null) { + factory = wrapWithLoggingRepositoryConnectionFactory(factory, operationLog); + logger.debug("Query logging is enabled"); + } else { + logger.debug("Query logging is not enabled"); + } + if (txProperties != null && txProperties.isEnabled()) { + factory = wrapWithTxRepositoryConnectionFactory(factory); + logger.debug("Spring transaction integration is enabled"); + } else { + logger.debug("Spring transaction integration is not enabled"); + } + return factory; + } + + RepositoryConnectionFactory getDirectRepositoryConnectionFactory(Repository repository) { + return new DirectRepositoryConnectionFactory(repository); + } + + RepositoryConnectionFactory wrapWithPooledRepositoryConnectionFactory( + RepositoryConnectionFactory delegate, PoolProperties poolProperties) { + GenericObjectPoolConfig config = new GenericObjectPoolConfig<>(); + config.setMaxTotal(poolProperties.getMaxConnections()); + config.setMinIdle(poolProperties.getMinIdleConnections()); + config.setTimeBetweenEvictionRunsMillis( + poolProperties.getTimeBetweenEvictionRuns().toMillis()); + config.setTestWhileIdle(poolProperties.isTestWhileIdle()); + return new PooledRepositoryConnectionFactory(delegate, config); + } + + RepositoryConnectionFactory wrapWithLoggingRepositoryConnectionFactory( + RepositoryConnectionFactory delegate, OperationLog operationLog) { + return new LoggingRepositoryConnectionFactory(delegate, operationLog); + } + + RepositoryConnectionFactory wrapWithCachingRepositoryConnectionFactory( + RepositoryConnectionFactory delegate, ResultCacheProperties resultCacheProperties) { + return new CachingRepositoryConnectionFactory(delegate, resultCacheProperties); + } + + TransactionalRepositoryConnectionFactory wrapWithTxRepositoryConnectionFactory( + RepositoryConnectionFactory delegate) { + return new TransactionalRepositoryConnectionFactory(delegate); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/RDF4JCRUDDao.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/RDF4JCRUDDao.java new file mode 100644 index 00000000000..9848444b831 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/RDF4JCRUDDao.java @@ -0,0 +1,285 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao; + +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.BindingsBuilder; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.MutableBindings; +import org.eclipse.rdf4j.spring.dao.support.key.CompositeKey; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.TupleQueryEvaluationBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.UpdateExecutionBuilder; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; + +/** + * Base class for DAOs providing CRUD functionality. The class allows for entities to be represented with different + * classes for read (ENTITY type) vs write (INPUT type) operations. DAOs that do not require this distinction must use + * the same class for both parameters. + * + * @param + * @param + * @param + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public abstract class RDF4JCRUDDao extends RDF4JDao { + private static final String KEY_READ_QUERY = "readQuery"; + public static final String KEY_PREFIX_INSERT = "insert"; + public static final String KEY_PREFIX_UPDATE = "update"; + private final Class idClass; + + /** + * Constructor that provides the type of the ID to the base implementation. This constructor has to be used if the + * ID is anything but IRI. + */ + public RDF4JCRUDDao(RDF4JTemplate rdf4JTemplate, Class idClass) { + super(rdf4JTemplate); + this.idClass = idClass; + } + + /** + * Constructor to be used by implementations that use IRI for the ID type. + */ + public RDF4JCRUDDao(RDF4JTemplate rdf4JTemplate) { + this(rdf4JTemplate, (Class) IRI.class); + } + + /** + * Saves the entity, loads it again and returns it. If the modified entity is not required, clients should prefer + * {@link #saveAndReturnId(Object, Object)} or {@link #saveAndReturnId(Object)} + */ + public final ENTITY save(INPUT input) { + ID id = getInputId(input); + final ID finalId = saveAndReturnId(input, id); + return getById(finalId); + } + + public ID saveAndReturnId(INPUT input) { + return saveAndReturnId(input, getInputId(input)); + } + + /** + * Saves the entity and returns its (possibly newly generated) ID. + * + * @param input the entity + * @param id the id or null for a new entity. + * @return the id (a newly generated one if the specified id is null, otherwise just id. + */ + public ID saveAndReturnId(INPUT input, ID id) { + if (id != null) { + // delete triples for the modify case + deleteForUpdate(id); + } + final ID finalId = getOrGenerateId(id); + getInsertQueryOrUseCached(input) + .withBindings(bindingsBuilder -> populateIdBindings(bindingsBuilder, finalId)) + .withBindings(bindingsBuilder -> populateBindingsForUpdate(bindingsBuilder, input)) + .execute(bindings -> postProcessUpdate(input, bindings)); + return finalId; + } + + /** + * When updating an entity via {@link #save(Object)}, its triples are removed first using this method. The default + * implementation used {@link RDF4JTemplate#deleteTriplesWithSubject(IRI)}. If more complex deletion behaviour (e.g. + * cascading) is needed, this method should be overriden. 
+ */ + protected void deleteForUpdate(ID id) { + IRI iri = convertIdToIri(id); + getRdf4JTemplate().deleteTriplesWithSubject(iri); + } + + private ID getOrGenerateId(ID id) { + boolean idPresent; + if (id instanceof CompositeKey) { + idPresent = ((CompositeKey) id).isPresent(); + } else { + idPresent = id != null; + } + if (!idPresent) { + id = generateNewId(id); + } + return id; + } + + /** + * Converts the provided id to an IRI. The default implementation only works for DAOs that use IRI ids. + * + * @param id + * @return + */ + protected IRI convertIdToIri(ID id) { + if (id == null) { + return null; + } + if (idClass.equals(IRI.class)) { + return (IRI) id; + } + throw new UnsupportedOperationException( + "Cannot generically convert IDs to IRIs. The subclass must implement convertToIri(ID)"); + } + + /** + * Generates a new id for an entity. The default implementation only works for IRI ids. + * + * @param providedId + * @return a new id. + */ + protected ID generateNewId(ID providedId) { + if (idClass.equals(IRI.class)) { + return (ID) getRdf4JTemplate().getNewUUID(); + } + throw new UnsupportedOperationException( + "Cannot generically generate any other IDs than IRIs. The subclass must implement generateNewId(ID)"); + } + + private UpdateExecutionBuilder getInsertQueryOrUseCached(INPUT input) { + final NamedSparqlSupplier cs = getInsertSparql(input); + String key = KEY_PREFIX_INSERT + cs.getName(); + return getRdf4JTemplate().update(this.getClass(), key, cs.getSparqlSupplier()); + } + + public final List list() { + return getReadQueryOrUseCached() + .evaluateAndConvert() + .toList(this::mapSolution, this::postProcessMappedSolution); + } + + private TupleQueryEvaluationBuilder getReadQueryOrUseCached() { + return getRdf4JTemplate().tupleQuery(getClass(), KEY_READ_QUERY, this::getReadQuery); + } + + /** + * Obtains the entity with the specified id, throwing an exception if none is found. + * + * @param id the id + * @return the entity + * @throws IncorrectResultSetSizeException if no entity is found with the specified id + */ + public final ENTITY getById(ID id) { + return getByIdOptional(id) + .orElseThrow( + () -> new IncorrectResultSetSizeException( + "Expected to find exactly one entity but found 0", 1, 0)); + } + + /** + * Obtains an optional entity with the specified id. + * + * @param id the id + * @return an Optional maybe containing the entity + */ + public final Optional getByIdOptional(ID id) { + return getReadQueryOrUseCached() + .withBindings(bindingsBuilder -> populateIdBindings(bindingsBuilder, id)) + .evaluateAndConvert() + .toSingletonOptional(this::mapSolution, this::postProcessMappedSolution); + } + + /** + * Naive implementation using {@link RDF4JTemplate#delete(IRI)}. DAOs that need more complex deletion behaviour + * (e.g. cascading) should override this method. + */ + public void delete(ID id) { + if (idClass.equals(IRI.class)) { + getRdf4JTemplate().delete((IRI) id); + } else { + throw new UnsupportedOperationException( + "Cannot generically delete instances that do not use IRI ids. The subclass must implement delete(ID)"); + } + } + + /** + * Returns the SPARQL string used to read an instance of T from the database. The base implementation will cache the + * query string, so implementations should not try to cache the query. 
+ */ + protected String getReadQuery() { + throw new UnsupportedOperationException( + "Cannot perform generic read operation: subclass does not override getReadQuery()"); + } + + /** + * Map one solution of the readQuery to the type of this DAO. + */ + protected ENTITY mapSolution(BindingSet querySolution) { + throw new UnsupportedOperationException( + "Cannot perform generic read operation: subclass does not override mapSolution()"); + } + + /** + * Callback invoked after mapping a solution to an entity, allowing subclasses to modify the entity before returning + * it to the client. + */ + protected ENTITY postProcessMappedSolution(ENTITY entity) { + return entity; + } + + /** + * Callback invoked after a successful insert/update. + */ + protected void postProcessUpdate(INPUT input, Map bindings) { + // empty default implementation + } + + /** + * Returns the SPARQL string used to write an instance of T to the database. The instance to be inserted is passed + * to the function so implementations can decide which query to use based on the instance. + */ + protected NamedSparqlSupplier getInsertSparql(INPUT input) { + throw new UnsupportedOperationException( + "Cannot perform generic write operation: subclass does not override getInsertQuery()"); + } + + /** + * Returns the SPARQL string used to update an instance of T in the database. The instance to be updated is passed + * to the function so implementations can decide which query to use based on the instance. + */ + protected NamedSparqlSupplier getUpdateSparql(INPUT input) { + throw new UnsupportedOperationException( + "Cannot perform generic write operation: subclass does not override getUpdateQuery()"); + } + + /** + * Binds the instance id to query variable(s). + */ + protected abstract void populateIdBindings(MutableBindings bindingsBuilder, ID id); + + /** + * Sets the non-id bindings on for the write query such that the instance of type I is written to the database. ID + * bindings are set through populateIdBindings() + */ + protected void populateBindingsForUpdate(MutableBindings bindingsBuilder, INPUT input) { + throw new UnsupportedOperationException( + "Cannot perform generic write operation: subclass does not override populateBindingsForUpdate()"); + } + + /** + * Obtains the id of the input instance or null if it is new (or a partially populated composite key). + */ + protected ID getInputId(INPUT input) { + throw new UnsupportedOperationException( + "Cannot perform generic write operation: subclass does not override getInputId()"); + } + + /** + * Returns a new BindingsBuilder for your convenience. + */ + protected static BindingsBuilder newBindingsBuilder() { + return new BindingsBuilder(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/RDF4JDao.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/RDF4JDao.java new file mode 100644 index 00000000000..4bec05d581c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/RDF4JDao.java @@ -0,0 +1,139 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao; + +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.ConcurrentHashMap; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.spring.dao.exception.RDF4JDaoException; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.GraphQueryEvaluationBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.TupleQueryEvaluationBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.UpdateExecutionBuilder; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +public abstract class RDF4JDao { + private final RDF4JTemplate rdf4JTemplate; + + private final Map namedSparqlSuppliers = new ConcurrentHashMap<>(); + + public RDF4JDao(RDF4JTemplate rdf4JTemplate) { + this.rdf4JTemplate = rdf4JTemplate; + prepareNamedSparqlSuppliers(new NamedSparqlSupplierPreparer()); + } + + protected RDF4JTemplate getRdf4JTemplate() { + return rdf4JTemplate; + } + + protected abstract NamedSparqlSupplierPreparer prepareNamedSparqlSuppliers( + NamedSparqlSupplierPreparer preparer); + + /** + * Prepares the specified SPARQL string for later use, e.g. in + * {@link RDF4JTemplate#tupleQuery(Class, NamedSparqlSupplier)}. + */ + private void prepareNamedSparqlSupplier(String key, String sparql) { + Objects.requireNonNull(key); + Objects.requireNonNull(sparql); + namedSparqlSuppliers.put(key, new NamedSparqlSupplier(key, () -> sparql)); + } + + /** + * Reads the SPARQL string from the specified resource using a {@link org.springframework.core.io.ResourceLoader} + * and prepares it for later use, e.g. in {@link RDF4JTemplate#tupleQuery(Class, NamedSparqlSupplier)}. + */ + private void prepareNamedSparqlSupplierFromResource(String key, String resourceName) { + Objects.requireNonNull(key); + Objects.requireNonNull(resourceName); + String sparqlString = getRdf4JTemplate().getStringSupplierFromResourceContent(resourceName).get(); + namedSparqlSuppliers.put(key, new NamedSparqlSupplier(key, () -> sparqlString)); + } + + /** + * Obtains the {@link NamedSparqlSupplier} with the specified key for use in, e.g., + * {@link RDF4JTemplate#tupleQuery(Class, NamedSparqlSupplier)}. + */ + protected NamedSparqlSupplier getNamedSparqlSupplier(String key) { + Objects.requireNonNull(key); + NamedSparqlSupplier supplier = namedSparqlSuppliers.get(key); + if (supplier == null) { + throw new RDF4JDaoException( + String.format( + "No NamedSparqlOperation found for key %s. 
Prepare it using Rdf4JDao.prepareNamedSparqlSuppliers() before calling this method!", + key)); + } + return supplier; + } + + protected String getNamedSparqlString(String key) { + return getNamedSparqlSupplier(key).getSparqlSupplier().get(); + } + + protected TupleQueryEvaluationBuilder getNamedTupleQuery(String key) { + return getRdf4JTemplate().tupleQuery(getClass(), getNamedSparqlSupplier(key)); + } + + protected GraphQueryEvaluationBuilder getNamedGraphQuery(String key) { + return getRdf4JTemplate().graphQuery(getClass(), getNamedSparqlSupplier(key)); + } + + protected UpdateExecutionBuilder getNamedUpdate(String key) { + return getRdf4JTemplate().update(getClass(), getNamedSparqlSupplier(key)); + } + + public class NamedSparqlSupplierPreparer { + + private NamedSparqlSupplierPreparer() { + } + + /** + * For the specified key, {@link java.util.function.Supplier} is registered with the + * subsequent supplySparql* method. + */ + public NamedSparqlSupplierFinishBuilder forKey(String key) { + return new NamedSparqlSupplierFinishBuilder(key); + } + } + + public class NamedSparqlSupplierFinishBuilder { + private final String key; + + public NamedSparqlSupplierFinishBuilder(String key) { + this.key = key; + } + + /** + * Supplies the specified SPARQL String. + */ + public NamedSparqlSupplierPreparer supplySparql(String sparql) { + prepareNamedSparqlSupplier(key, sparql); + return new NamedSparqlSupplierPreparer(); + } + + /** + * Loads the specified resource using a {@link org.springframework.core.io.ResourceLoader} and + * supplies its content as String, the assumption is that it contains a SPARQL operation. + */ + public NamedSparqlSupplierPreparer supplySparqlFromResource(String resource) { + prepareNamedSparqlSupplierFromResource(key, resource); + return new NamedSparqlSupplierPreparer(); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/SimpleRDF4JCRUDDao.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/SimpleRDF4JCRUDDao.java new file mode 100644 index 00000000000..d454f047588 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/SimpleRDF4JCRUDDao.java @@ -0,0 +1,26 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; + +@Experimental +public abstract class SimpleRDF4JCRUDDao extends RDF4JCRUDDao { + public SimpleRDF4JCRUDDao(RDF4JTemplate rdf4JTemplate, Class idClass) { + super(rdf4JTemplate, idClass); + } + + public SimpleRDF4JCRUDDao(RDF4JTemplate rdf4JTemplate) { + super(rdf4JTemplate); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/IncorrectResultSetSizeException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/IncorrectResultSetSizeException.java new file mode 100644 index 00000000000..dbacfc618ec --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/IncorrectResultSetSizeException.java @@ -0,0 +1,58 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class IncorrectResultSetSizeException extends RDF4JDaoException { + int expectedSize; + int actualSize; + + public IncorrectResultSetSizeException(int expectedSize, int actualSize) { + super(makeMessage(expectedSize, actualSize)); + this.expectedSize = expectedSize; + this.actualSize = actualSize; + } + + private static String makeMessage(int expectedSize, int actualSize) { + return String.format("Expected %d results but got %d", expectedSize, actualSize); + } + + public IncorrectResultSetSizeException(String message, int expectedSize, int actualSize) { + super(message); + this.expectedSize = expectedSize; + this.actualSize = actualSize; + } + + public IncorrectResultSetSizeException( + String message, Throwable cause, int expectedSize, int actualSize) { + super(message, cause); + this.expectedSize = expectedSize; + this.actualSize = actualSize; + } + + public IncorrectResultSetSizeException(Throwable cause, int expectedSize, int actualSize) { + super(makeMessage(expectedSize, actualSize), cause); + this.expectedSize = expectedSize; + this.actualSize = actualSize; + } + + public int getExpectedSize() { + return expectedSize; + } + + public int getActualSize() { + return actualSize; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/RDF4JDaoException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/RDF4JDaoException.java new file mode 100644 index 00000000000..00258527e12 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/RDF4JDaoException.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RDF4JDaoException extends RDF4JSpringException { + public RDF4JDaoException() { + } + + public RDF4JDaoException(String message) { + super(message); + } + + public RDF4JDaoException(String message, Throwable cause) { + super(message, cause); + } + + public RDF4JDaoException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/RDF4JSpringException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/RDF4JSpringException.java new file mode 100644 index 00000000000..1ff01905cc9 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/RDF4JSpringException.java @@ -0,0 +1,36 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.exception; + +import org.eclipse.rdf4j.common.exception.RDF4JException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RDF4JSpringException extends RDF4JException { + public RDF4JSpringException() { + } + + public RDF4JSpringException(String message) { + super(message); + } + + public RDF4JSpringException(String message, Throwable cause) { + super(message, cause); + } + + public RDF4JSpringException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/UnexpectedResultException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/UnexpectedResultException.java new file mode 100644 index 00000000000..c9ef0a9aed4 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/UnexpectedResultException.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class UnexpectedResultException extends RDF4JDaoException { + public UnexpectedResultException() { + } + + public UnexpectedResultException(String message) { + super(message); + } + + public UnexpectedResultException(String message, Throwable cause) { + super(message, cause); + } + + public UnexpectedResultException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/UnsupportedDataTypeException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/UnsupportedDataTypeException.java new file mode 100644 index 00000000000..08563fa72e4 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/UnsupportedDataTypeException.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class UnsupportedDataTypeException extends UnexpectedResultException { + public UnsupportedDataTypeException() { + } + + public UnsupportedDataTypeException(String message) { + super(message); + } + + public UnsupportedDataTypeException(String message, Throwable cause) { + super(message, cause); + } + + public UnsupportedDataTypeException(Throwable cause) { + super(cause); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/mapper/ExceptionMapper.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/mapper/ExceptionMapper.java new file mode 100644 index 00000000000..b361f2336a5 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/exception/mapper/ExceptionMapper.java @@ -0,0 +1,28 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.exception.mapper; + +import org.eclipse.rdf4j.spring.dao.exception.RDF4JSpringException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ExceptionMapper { + + public static RDF4JSpringException mapException(String message, Exception e) { + if (e instanceof RDF4JSpringException) { + return (RDF4JSpringException) e; + } + return new RDF4JSpringException(message, e); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/package-info.java new file mode 100644 index 00000000000..00317c279c0 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/package-info.java @@ -0,0 +1,43 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

<h1>Rdf4j-Spring DAO</h1>
+ *
+ * Support for custom DAO (data access object) implementations.
+ *
+ * <p>Such custom DAO implementations get access to the following subsystems:
+ *
+ * <ul>
+ * <li>{@link org.eclipse.rdf4j.spring.support.RDF4JTemplate Rdf4JTemplate}: Central service for accessing
+ * repositories, executing queries and updates, as well as transforming results into Java entities or collections
+ * <li>{@link org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier NamedSparqlSupplier}: DAO-specific map of
+ * SPARQL strings aiding efficient generation and caching of operations
+ * </ul>
+ *
+ * <p>There are two variants of DAOs:
+ *
+ * <ul>
+ * <li>{@link org.eclipse.rdf4j.spring.dao.RDF4JDao Rdf4JDao}: Base class for DAOs with support for named operations and
+ * access
+ * <li>{@link org.eclipse.rdf4j.spring.dao.RDF4JCRUDDao Rdf4JCRUDDao}: Base class for DAOs that are associated with
+ * specific entity classes, providing additional support for CRUD operations on these entities.
+ * </ul>
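+ *
+ * <p>A typical custom DAO extends one of these base classes and has the
+ * {@link org.eclipse.rdf4j.spring.support.RDF4JTemplate RDF4JTemplate} injected by Spring. The following is only an
+ * illustrative sketch (the entity class, bean name and binding variable are invented for the example):
+ *
+ * <pre>{@code
+ * public class ArtistDao extends SimpleRDF4JCRUDDao<Artist, IRI> {
+ *
+ *     public ArtistDao(RDF4JTemplate rdf4JTemplate) {
+ *         super(rdf4JTemplate);
+ *     }
+ *
+ *     protected NamedSparqlSupplierPreparer prepareNamedSparqlSuppliers(NamedSparqlSupplierPreparer preparer) {
+ *         return preparer; // no named queries needed for this sketch
+ *     }
+ *
+ *     protected void populateIdBindings(MutableBindings bindingsBuilder, IRI id) {
+ *         bindingsBuilder.add("artist_id", id); // "artist_id" is the id variable assumed in the read query
+ *     }
+ * }
+ * }</pre>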
+ * + * @since 4.0.0 + * @author Florian Kleedorfer + * + */ +package org.eclipse.rdf4j.spring.dao; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/BindingSetMapper.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/BindingSetMapper.java new file mode 100644 index 00000000000..f8805cbf472 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/BindingSetMapper.java @@ -0,0 +1,39 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import java.util.function.Function; + +import org.eclipse.rdf4j.query.BindingSet; + +/** + * Maps a query solution to an instance. + * + * @param + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface BindingSetMapper extends Function { + + /** + * Maps a query solution to an instance of T. If the return value is null the mapper + * indicates that the solution is to be disregarded. + * + * @return an instance of T or null if the solution should be ignored. + */ + @Override + T apply(BindingSet bindings); + + static BindingSetMapper identity() { + return b -> b; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/MappingPostProcessor.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/MappingPostProcessor.java new file mode 100644 index 00000000000..65548c7eabb --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/MappingPostProcessor.java @@ -0,0 +1,21 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import java.util.function.Function; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface MappingPostProcessor extends Function { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/RelationMapBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/RelationMapBuilder.java new file mode 100644 index 00000000000..d4b982fbda8 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/RelationMapBuilder.java @@ -0,0 +1,320 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import static org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf.iri; +import static org.eclipse.rdf4j.spring.util.QueryResultUtils.getIRI; +import static org.eclipse.rdf4j.spring.util.QueryResultUtils.getIRIOptional; + +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.sparqlbuilder.core.Projectable; +import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; +import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries; +import org.eclipse.rdf4j.sparqlbuilder.graphpattern.GraphPattern; +import org.eclipse.rdf4j.sparqlbuilder.graphpattern.TriplePattern; +import org.eclipse.rdf4j.sparqlbuilder.rdf.RdfPredicate; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.BindingsBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.TupleQueryEvaluationBuilder; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RelationMapBuilder { + public static final Variable _relSubject = SparqlBuilder.var("rel_subject"); + public static final Variable _relObject = SparqlBuilder.var("rel_object"); + private static final Variable _relKey = SparqlBuilder.var("rel_key"); + private static final Variable _relValue = SparqlBuilder.var("rel_value"); + private static final IRI NOTHING = SimpleValueFactory.getInstance() + .createIRI("urn:java:relationDaoSupport:Nothing"); + private final RdfPredicate predicate; + private GraphPattern[] constraints = new GraphPattern[0]; + private final RDF4JTemplate rdf4JTemplate; + private boolean isRelationOptional = false; + private boolean isSubjectKeyed = true; + private final BindingsBuilder bindingsBuilder = new BindingsBuilder(); + + public RelationMapBuilder(RDF4JTemplate rdf4JTemplate, RdfPredicate predicate) { + this.rdf4JTemplate = rdf4JTemplate; + this.predicate = predicate; + } + + public RelationMapBuilder(RDF4JTemplate rdf4JTemplate, IRI predicate) { + this.rdf4JTemplate = rdf4JTemplate; + this.predicate = iri(predicate); + } + + /** + * Constrains the result iff the {@link GraphPattern} contains the variables {@link RelationMapBuilder#_relSubject} + * and/or {@link RelationMapBuilder#_relObject}, which are the variables in the triple with the {@link RdfPredicate} + * specified in the constructor. + */ + public RelationMapBuilder constraints(GraphPattern... constraints) { + this.constraints = constraints; + return this; + } + + /** + * Indicates that the existence of the triple is not required, allowing to use the constraints to select certain + * subjects and to answer the mapping to an empty Set in the {@link RelationMapBuilder#buildOneToMany()} case and + * {@link RelationMapBuilder#NOTHING} in the {@link RelationMapBuilder#buildOneToOne()} case. 
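+ *
+ * <p>For example (the predicate constant and variable name are illustrative):
+ *
+ * <pre>{@code
+ * Map<IRI, Set<IRI>> paintingsPerArtist = new RelationMapBuilder(rdf4JTemplate, EX_HAS_PAINTING)
+ *         .relationIsOptional()
+ *         .buildOneToMany();
+ * }</pre>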
+ * + * @return the builder + */ + public RelationMapBuilder relationIsOptional() { + this.isRelationOptional = true; + return this; + } + + /** + * Indicates that the builder should use the triple's object for the key in the resulting {@link Map} instead of the + * subject (the default). + */ + public RelationMapBuilder useRelationObjectAsKey() { + this.isSubjectKeyed = false; + return this; + } + + /** + * Builds a One-to-One Map using the configuration of this builder. Throws an Exception if more than one values are + * found for a given key. If {@link #isRelationOptional} is true + * and no triple is found for the key, {@link #NOTHING} is set as the value. + */ + public Map buildOneToOne() { + return makeTupleQueryBuilder() + .evaluateAndConvert() + .toMap(b -> getIRI(b, _relKey), this::getRelationValueOrNothing); + } + + /** + * Builds a One-to-Many Map using the configuration of this builder. + */ + public Map> buildOneToMany() { + return makeTupleQueryBuilder() + .evaluateAndConvert() + .mapAndCollect( + Function.identity(), + Collectors.toMap( + b -> getIRI(b, _relKey), + b -> getIRIOptional(b, _relValue) + .map(Set::of) + .orElseGet(Set::of), + RelationMapBuilder::mergeSets)); + } + + private static Set mergeSets(Set left, Set right) { + Set merged = new HashSet<>(left); + merged.addAll(right); + return merged; + } + + private IRI getRelationValue(BindingSet b) { + if (isRelationOptional) { + return getIRIOptional(b, _relValue).orElse(NOTHING); + } else { + return getIRI(b, _relValue); + } + } + + private IRI getRelationValueOrNothing(BindingSet b) { + if (isRelationOptional) { + return getIRIOptional(b, _relValue).orElse(NOTHING); + } else { + return getIRI(b, _relValue); + } + } + + private TupleQueryEvaluationBuilder makeTupleQueryBuilder() { + return rdf4JTemplate + .tupleQuery( + Queries.SELECT(getProjection()) + .where(getWhereClause()) + .distinct() + .getQueryString()) + .withBindings(bindingsBuilder.build()); + } + + private Projectable[] getProjection() { + if (this.isSubjectKeyed) { + return new Projectable[] { + SparqlBuilder.as(_relSubject, _relKey), SparqlBuilder.as(_relObject, _relValue) + }; + } else { + return new Projectable[] { + SparqlBuilder.as(_relSubject, _relValue), SparqlBuilder.as(_relObject, _relKey) + }; + } + } + + private GraphPattern[] getWhereClause() { + TriplePattern tp = _relSubject.has(predicate, _relObject); + if (this.isRelationOptional) { + GraphPattern[] ret = new GraphPattern[constraints.length + 1]; + ret[0] = tp.optional(); + System.arraycopy(constraints, 0, ret, 1, constraints.length); + return ret; + } else { + return new GraphPattern[] { tp.and(constraints) }; + } + } + + public RelationMapBuilder withBinding(Variable key, Value value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, Value value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(Variable key, Value value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, Value value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBinding(Variable key, IRI value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, IRI value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(Variable key, IRI value) { + bindingsBuilder.addMaybe(key, value); + return 
this; + } + + public RelationMapBuilder withBindingMaybe(Variable key, String value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, IRI value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBinding(Variable key, String value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, String value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, String value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBinding(Variable key, Integer value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, Integer value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(Variable key, Integer value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, Integer value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBinding(Variable key, Boolean value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, Boolean value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(Variable key, Boolean value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, Boolean value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBinding(Variable key, Float value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, Float value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(Variable key, Float value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, Float value) { + bindingsBuilder.addMaybe(key, value); + return this; + } + + public RelationMapBuilder withBinding(Variable key, Double value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBinding(String key, Double value) { + bindingsBuilder.add(key, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(Variable var, Double value) { + bindingsBuilder.addMaybe(var, value); + return this; + } + + public RelationMapBuilder withBindingMaybe(String key, Double value) { + bindingsBuilder.addMaybe(key, value); + return this; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/TupleQueryResultMapper.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/TupleQueryResultMapper.java new file mode 100644 index 00000000000..beb86769c0a --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/TupleQueryResultMapper.java @@ -0,0 +1,23 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import java.util.function.Function; + +import org.eclipse.rdf4j.query.TupleQueryResult; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface TupleQueryResultMapper extends Function { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/UpdateCallback.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/UpdateCallback.java new file mode 100644 index 00000000000..b3f613200e7 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/UpdateCallback.java @@ -0,0 +1,24 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import java.util.Map; +import java.util.function.Consumer; + +import org.eclipse.rdf4j.model.Value; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface UpdateCallback extends Consumer> { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/UpdateWithModelBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/UpdateWithModelBuilder.java new file mode 100644 index 00000000000..3549400ecc9 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/UpdateWithModelBuilder.java @@ -0,0 +1,179 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import java.io.StringWriter; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Function; + +import org.apache.commons.lang3.ObjectUtils; +import org.eclipse.rdf4j.model.BNode; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.Namespace; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.util.ModelBuilder; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.Rio; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class UpdateWithModelBuilder { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final RepositoryConnection con; + private final ModelBuilder modelBuilder; + + public UpdateWithModelBuilder(RepositoryConnection con) { + this.con = con; + this.modelBuilder = new ModelBuilder(); + } + + public UpdateWithModelBuilder setNamespace(Namespace ns) { + modelBuilder.setNamespace(ns); + return this; + } + + public UpdateWithModelBuilder setNamespace(String prefix, String namespace) { + modelBuilder.setNamespace(prefix, namespace); + return this; + } + + public UpdateWithModelBuilder subject(Resource subject) { + modelBuilder.subject(subject); + return this; + } + + public UpdateWithModelBuilder subject(String prefixedNameOrIri) { + modelBuilder.subject(prefixedNameOrIri); + return this; + } + + public UpdateWithModelBuilder namedGraph(Resource namedGraph) { + modelBuilder.namedGraph(namedGraph); + return this; + } + + public UpdateWithModelBuilder namedGraph(String prefixedNameOrIRI) { + modelBuilder.namedGraph(prefixedNameOrIRI); + return this; + } + + public UpdateWithModelBuilder defaultGraph() { + modelBuilder.defaultGraph(); + return this; + } + + public UpdateWithModelBuilder addMaybe(Resource subject, IRI predicate, Object object) { + if (ObjectUtils.allNotNull(subject, predicate, object)) { + return add(subject, predicate, object); + } + return this; + } + + public UpdateWithModelBuilder add(Resource subject, IRI predicate, Object object) { + modelBuilder.add(subject, predicate, object); + return this; + } + + public UpdateWithModelBuilder addMaybe(String subject, IRI predicate, Object object) { + if (ObjectUtils.allNotNull(subject, predicate, object)) { + return add(subject, predicate, object); + } + return this; + } + + public UpdateWithModelBuilder add(String subject, IRI predicate, Object object) { + modelBuilder.add(subject, predicate, object); + return this; + } + + public UpdateWithModelBuilder addMaybe(String subject, String predicate, Object object) { + if (ObjectUtils.allNotNull(subject, predicate, object)) { + return add(subject, predicate, object); + } + return this; + } + + public UpdateWithModelBuilder add(String subject, String predicate, Object object) { + modelBuilder.add(subject, predicate, object); + return this; + } + + public UpdateWithModelBuilder addMaybe(IRI predicate, Object object) { + if (ObjectUtils.allNotNull(predicate, object)) { + return add(predicate, object); + } + return this; + } + + public 
UpdateWithModelBuilder add(IRI predicate, Object object) { + modelBuilder.add(predicate, object); + return this; + } + + public UpdateWithModelBuilder addMaybe(String predicate, Object object) { + if (ObjectUtils.allNotNull(predicate, object)) { + return add(predicate, object); + } + return this; + } + + public UpdateWithModelBuilder add(String predicate, Object object) { + modelBuilder.add(predicate, object); + return this; + } + + public void acceptConnection(Consumer connectionConsumer) { + connectionConsumer.accept(this.con); + } + + public T applyToConnection(Function function) { + return function.apply(con); + } + + public BNode createBNode() { + return con.getValueFactory().createBNode(); + } + + public UpdateWithModelBuilder withSink(Consumer> consumer) { + List sink = new ArrayList<>(); + consumer.accept(sink); + if (!sink.isEmpty()) { + sink.stream() + .forEach( + s -> modelBuilder.add(s.getSubject(), s.getPredicate(), s.getObject())); + } + return this; + } + + public void execute() { + Model model = modelBuilder.build(); + if (logger.isDebugEnabled()) { + StringWriter sw = new StringWriter(); + Rio.write(model, sw, RDFFormat.TURTLE); + logger.debug("adding the following triples:\n{}", sw.toString()); + } + con.add(model); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/bindingsBuilder/BindingsBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/bindingsBuilder/BindingsBuilder.java new file mode 100644 index 00000000000..e243d8044b6 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/bindingsBuilder/BindingsBuilder.java @@ -0,0 +1,222 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.bindingsBuilder; + +import java.util.HashMap; +import java.util.Map; +import java.util.Objects; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class BindingsBuilder implements MutableBindings { + private final Map bindings; + + public BindingsBuilder() { + this.bindings = new HashMap<>(); + } + + public BindingsBuilder(Map bindings) { + this.bindings = bindings; + } + + public Map build() { + return this.bindings; + } + + @Override + public BindingsBuilder add(Variable key, Value value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, Value value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + @Override + public BindingsBuilder addMaybe(Variable key, Value value) { + return addMaybe(key.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(String key, Value value) { + checkKeyNotPresent(key); + if (value != null) { + bindings.put(key, value); + } + return this; + } + + @Override + public BindingsBuilder add(Variable key, IRI value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, IRI value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + @Override + public BindingsBuilder addMaybe(Variable key, IRI value) { + return addMaybe(key.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(Variable key, String value) { + return addMaybe(key.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(String key, IRI value) { + checkKeyNotPresent(key); + if (value != null) { + bindings.put(key, value); + } + return this; + } + + @Override + public BindingsBuilder add(Variable key, String value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, String value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + public BindingsBuilder addMaybe(String key, String value) { + checkKeyNotPresent(key); + if (value != null) { + bindings.put(key, SimpleValueFactory.getInstance().createLiteral(value)); + } + return this; + } + + @Override + public BindingsBuilder add(Variable key, Integer value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, Integer value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + @Override + public BindingsBuilder addMaybe(Variable key, Integer value) { + return addMaybe(key.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(String key, Integer value) { + checkKeyNotPresent(key); + if (value != null) { + bindings.put(key, SimpleValueFactory.getInstance().createLiteral(value)); + } + return this; + } + + @Override + public BindingsBuilder add(Variable key, Boolean value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, Boolean value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + @Override + public BindingsBuilder addMaybe(Variable key, Boolean value) { + return addMaybe(key.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(String 
key, Boolean value) { + checkKeyNotPresent(key); + if (value != null) { + bindings.put(key, SimpleValueFactory.getInstance().createLiteral(value)); + } + return this; + } + + @Override + public BindingsBuilder add(Variable key, Float value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, Float value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + @Override + public BindingsBuilder addMaybe(Variable key, Float value) { + return addMaybe(key.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(String key, Float value) { + checkKeyNotPresent(key); + if (value != null) { + bindings.put(key, SimpleValueFactory.getInstance().createLiteral(value)); + } + return this; + } + + @Override + public BindingsBuilder add(Variable key, Double value) { + return add(key.getVarName(), value); + } + + @Override + public BindingsBuilder add(String key, Double value) { + Objects.requireNonNull(value); + return addMaybe(key, value); + } + + @Override + public BindingsBuilder addMaybe(Variable var, Double value) { + return addMaybe(var.getVarName(), value); + } + + @Override + public BindingsBuilder addMaybe(String key, Double value) { + checkKeyNotPresent(key); + if (bindings != null) { + bindings.put(key, SimpleValueFactory.getInstance().createLiteral(value)); + } + return this; + } + + private void checkKeyNotPresent(String key) { + if (bindings.containsKey(key)) { + throw new IllegalArgumentException( + String.format("Binding for key '%s' already registered", key)); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/bindingsBuilder/MutableBindings.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/bindingsBuilder/MutableBindings.java new file mode 100644 index 00000000000..a8975d4ad05 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/bindingsBuilder/MutableBindings.java @@ -0,0 +1,80 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.bindingsBuilder; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +public interface MutableBindings { + BindingsBuilder add(Variable key, Value value); + + BindingsBuilder add(String key, Value value); + + BindingsBuilder add(Variable key, IRI value); + + BindingsBuilder add(String key, IRI value); + + BindingsBuilder add(Variable key, String value); + + BindingsBuilder add(String key, String value); + + BindingsBuilder add(Variable key, Integer value); + + BindingsBuilder add(String key, Integer value); + + BindingsBuilder add(Variable key, Boolean value); + + BindingsBuilder add(String key, Boolean value); + + BindingsBuilder addMaybe(Variable key, Boolean value); + + BindingsBuilder addMaybe(String key, Boolean value); + + BindingsBuilder add(Variable key, Float value); + + BindingsBuilder add(String key, Float value); + + BindingsBuilder add(Variable key, Double value); + + BindingsBuilder add(String key, Double value); + + BindingsBuilder addMaybe(Variable key, Value value); + + BindingsBuilder addMaybe(String key, Value value); + + BindingsBuilder addMaybe(Variable key, IRI value); + + BindingsBuilder addMaybe(String key, IRI value); + + BindingsBuilder addMaybe(Variable key, String value); + + BindingsBuilder addMaybe(String key, String value); + + BindingsBuilder addMaybe(Variable key, Integer value); + + BindingsBuilder addMaybe(String key, Integer value); + + BindingsBuilder addMaybe(Variable key, Float value); + + BindingsBuilder addMaybe(String key, Float value); + + BindingsBuilder addMaybe(Variable key, Double value); + + BindingsBuilder addMaybe(String key, Double value); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/key/CompositeKey.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/key/CompositeKey.java new file mode 100644 index 00000000000..99c6237eae5 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/key/CompositeKey.java @@ -0,0 +1,23 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.key; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface CompositeKey { + /** + * Returns true if the composite key is fully defined. 
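+ *
+ * <p>For example, with {@link org.eclipse.rdf4j.spring.dao.support.key.CompositeKey2 CompositeKey2} (values are
+ * illustrative):
+ *
+ * <pre>{@code
+ * CompositeKey2<IRI, IRI> key = new CompositeKey2<>(artistIri, null);
+ * key.isPresent(); // false: key2 is still missing
+ * key = key.setKey2(paintingIri);
+ * key.isPresent(); // true: both parts are set
+ * }</pre>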
+ */ + boolean isPresent(); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/key/CompositeKey2.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/key/CompositeKey2.java new file mode 100644 index 00000000000..8c0d8e5032c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/key/CompositeKey2.java @@ -0,0 +1,59 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.key; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CompositeKey2 implements CompositeKey { + private final K1 key1; + private final K2 key2; + + public CompositeKey2(K1 key1, K2 key2) { + this.key1 = key1; + this.key2 = key2; + } + + @Override + public boolean isPresent() { + return key1 != null && key2 != null; + } + + public K1 getKey1() { + return key1; + } + + public K2 getKey2() { + return key2; + } + + public CompositeKey2 setKey1(K1 key1) { + if (this.key1 != null) { + throw new IllegalArgumentException( + String.format( + "Refusing to replace key1 - it is already set to value '%s'", + this.key1.toString())); + } + return new CompositeKey2<>(key1, this.key2); + } + + public CompositeKey2 setKey2(K2 key2) { + if (this.key2 != null) { + throw new IllegalArgumentException( + String.format( + "Refusing to replace key2 - it is already set to value '%s'", + this.key2.toString())); + } + return new CompositeKey2<>(this.key1, key2); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/GraphQueryEvaluationBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/GraphQueryEvaluationBuilder.java new file mode 100644 index 00000000000..b919f5621c1 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/GraphQueryEvaluationBuilder.java @@ -0,0 +1,56 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.opbuilder; + +import static org.eclipse.rdf4j.spring.dao.exception.mapper.ExceptionMapper.mapException; +import static org.eclipse.rdf4j.spring.dao.support.operation.OperationUtils.setBindings; + +import java.lang.invoke.MethodHandles; +import java.util.function.Supplier; + +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.spring.dao.support.operation.GraphQueryResultConverter; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class GraphQueryEvaluationBuilder + extends OperationBuilder { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public GraphQueryEvaluationBuilder(GraphQuery operation, RDF4JTemplate template) { + super(operation, template); + } + + public GraphQueryResultConverter evaluateAndConvert() { + return withTryCatchAndLog( + () -> { + GraphQuery graphQuery = getOperation(); + setBindings(graphQuery, getBindings()); + return new GraphQueryResultConverter(graphQuery.evaluate()); + }, + "Error evaluating GraphQuery:\n" + getOperation().toString()); + } + + private T withTryCatchAndLog(Supplier supplier, String errorString) { + try { + return supplier.get(); + } catch (Exception e) { + logger.debug(errorString, e); + throw mapException(errorString, e); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/OperationBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/OperationBuilder.java new file mode 100644 index 00000000000..4bf519d6fd8 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/OperationBuilder.java @@ -0,0 +1,133 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.opbuilder; + +import java.util.Map; +import java.util.Objects; +import java.util.function.Consumer; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.Operation; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.BindingsBuilder; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class OperationBuilder> { + private final T operation; + private final BindingsBuilder bindingsBuilder = new BindingsBuilder(); + private final RDF4JTemplate rdf4JTemplate; + + public OperationBuilder(T operation, RDF4JTemplate template) { + Objects.requireNonNull(operation); + this.operation = operation; + this.rdf4JTemplate = template; + } + + protected T getOperation() { + return operation; + } + + protected RDF4JTemplate getRdf4JTemplate() { + return rdf4JTemplate; + } + + protected Map getBindings() { + return bindingsBuilder.build(); + } + + public SUB withBinding(Variable key, IRI value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, IRI value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(Variable key, String value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, String value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(Variable key, Integer value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, Integer value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(Variable key, Boolean value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, Boolean value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(Variable key, Float value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, Float value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(Variable key, Double value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, Double value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBinding(String key, Value value) { + bindingsBuilder.add(key, value); + return (SUB) this; + } + + public SUB withBindings(Map bindings) { + bindings.forEach((key, value) -> bindingsBuilder.add(key, value)); + return (SUB) this; + } + + public SUB withNullableBindings(Map bindings) { + if (bindings != null) { + bindings.forEach((key, value) -> bindingsBuilder.add(key, value)); + } + return (SUB) this; + } + + public SUB withBindings(Consumer consumer) { + consumer.accept(this.bindingsBuilder); + return (SUB) this; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/TupleQueryEvaluationBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/TupleQueryEvaluationBuilder.java new file mode 100644 index 00000000000..29c2ef298e1 --- /dev/null +++ 
b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/TupleQueryEvaluationBuilder.java @@ -0,0 +1,46 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.opbuilder; + +import static org.eclipse.rdf4j.spring.dao.exception.mapper.ExceptionMapper.mapException; +import static org.eclipse.rdf4j.spring.dao.support.operation.OperationUtils.setBindings; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.spring.dao.support.operation.TupleQueryResultConverter; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TupleQueryEvaluationBuilder + extends OperationBuilder<TupleQuery, TupleQueryEvaluationBuilder> { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public TupleQueryEvaluationBuilder(TupleQuery operation, RDF4JTemplate template) { + super(operation, template); + } + + public TupleQueryResultConverter evaluateAndConvert() { + try { + setBindings(getOperation(), getBindings()); + return new TupleQueryResultConverter(getOperation().evaluate()); + } catch (Exception e) { + logger.debug("Caught exception while evaluating TupleQuery", e); + throw mapException("Error evaluating TupleQuery:\n" + getOperation().toString(), e); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/UpdateExecutionBuilder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/UpdateExecutionBuilder.java new file mode 100644 index 00000000000..4bff107ca04 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/opbuilder/UpdateExecutionBuilder.java @@ -0,0 +1,46 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php.
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.opbuilder; + +import static org.eclipse.rdf4j.spring.dao.support.operation.OperationUtils.setBindings; + +import java.util.Map; + +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.spring.dao.support.UpdateCallback; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class UpdateExecutionBuilder extends OperationBuilder { + + public UpdateExecutionBuilder(Update update, RDF4JTemplate template) { + super(update, template); + } + + public void execute() { + Update update = getOperation(); + setBindings(update, getBindings()); + update.execute(); + } + + public void execute(UpdateCallback updateCallback) { + Map bindings = getBindings(); + Update update = getOperation(); + setBindings(update, bindings); + update.execute(); + updateCallback.accept(bindings); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/GraphQueryResultConverter.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/GraphQueryResultConverter.java new file mode 100644 index 00000000000..405827b593d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/GraphQueryResultConverter.java @@ -0,0 +1,48 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.operation; + +import static org.eclipse.rdf4j.spring.dao.exception.mapper.ExceptionMapper.mapException; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.impl.TreeModelFactory; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class GraphQueryResultConverter { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final GraphQueryResult graphQueryResult; + + public GraphQueryResultConverter(GraphQueryResult graphQueryResult) { + this.graphQueryResult = graphQueryResult; + } + + public Model toModel() { + try { + Model resultModel = new TreeModelFactory().createEmptyModel(); + graphQueryResult.forEach(resultModel::add); + return resultModel; + } catch (Exception e) { + logger.debug("Error converting graph query result to model", e); + throw mapException("Error converting graph query result to model", e); + } finally { + graphQueryResult.close(); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/OperationType.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/OperationType.java new file mode 100644 index 00000000000..bfa17e52b42 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/OperationType.java @@ -0,0 +1,22 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.operation; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public enum OperationType { + TUPLEQUERY, + GRAPHQUERY, + UPDATE +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/OperationUtils.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/OperationUtils.java new file mode 100644 index 00000000000..0d1be9ada9c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/OperationUtils.java @@ -0,0 +1,145 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.operation; + +import java.lang.invoke.MethodHandles; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.stream.Collector; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.Operation; +import org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf; +import org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class OperationUtils { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + /** + * Returns the object in the {@link Collection} wrapped in an {@link Optional}, an empty Optional if the collection + * is empty, throwing an Exception if the Collection contains more than one element. + */ + public static Optional atMostOne(Collection objects) { + int size = objects.size(); + if (size > 1) { + throw new IncorrectResultSetSizeException( + "Expected to find at most one entity, but found " + size, 1, size); + } + return objects.stream().findFirst(); + } + + /** + * Returns the object contained in the specified {@link Optional}, throwing an Exception if it is empty. + */ + public static T require(Optional required) { + if (required.isEmpty()) { + throw new IncorrectResultSetSizeException( + "Expected to find exactly one entity but found 0", 1, 0); + } + return required.get(); + } + + /** + * Returns the element in the {@link Collection}, throwing an exception if the collection is empty or contains more + * than one element. + */ + public static T exactlyOne(Collection objects) { + return require(atMostOne(objects)); + } + + /** + * Returns the element in the {@link java.util.stream.Stream}, throwing an exception if the stream is empty or + * contains more than one element. + */ + public static Collector toSingleton() { + return Collectors.collectingAndThen( + Collectors.toList(), + list -> { + int size = list.size(); + if (size != 1) { + throw new IncorrectResultSetSizeException( + "Expected exactly one result, found " + size, 1, size); + } + return list.get(0); + }); + } + + /** + * Returns the element in the {@link java.util.stream.Stream}, or null if the stream is empty, throwing an exception + * if the stream contains more than one element. + */ + public static Collector toSingletonMaybe() { + return Collectors.collectingAndThen( + Collectors.toList(), + list -> { + int size = list.size(); + if (size > 1) { + throw new IncorrectResultSetSizeException( + "Expected zero or one result, found " + size, 1, size); + } else if (size == 0) { + return null; + } + return list.get(0); + }); + } + + /** + * Returns the element in the {@link java.util.stream.Stream} wrapped in an {@link Optional} throwing an exception + * if the stream contains more than one element. 
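+ * <p>Usage sketch (illustrative only, not part of this change; assumes a {@code Stream<String>} named {@code names}):
+ *
+ * <pre>{@code
+ * Optional<String> name = names.collect(OperationUtils.toSingletonOptional());
+ * }</pre>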
+ */ + public static <T> Collector<T, ?, Optional<T>> toSingletonOptional() { + return Collectors.collectingAndThen( + Collectors.toList(), + list -> { + int size = list.size(); + if (size > 1) { + throw new IncorrectResultSetSizeException( + "Expected zero or one result, found " + size, 1, size); + } else if (size == 0) { + return Optional.empty(); + } + return Optional.ofNullable(list.get(0)); + }); + } + + public static void setBindings(Operation operation, Map<String, Value> bindings) { + debugLogBindings(bindings); + operation.clearBindings(); + if (bindings != null) { + bindings.entrySet() + .stream() + .forEach(entry -> operation.setBinding(entry.getKey(), entry.getValue())); + } + } + + private static void debugLogBindings(Map<String, Value> bindings) { + if (logger.isDebugEnabled() && bindings != null) { + logger.debug("bindings: {}", bindings); + List<String> keys = bindings.keySet().stream().collect(Collectors.toList()); + logger.debug( + "values block:\n\nVALUES ( {} ) { ( {} ) }\n", + keys.stream().map(k -> "?" + k).collect(Collectors.joining(" ")), + keys.stream() + .map(k -> Rdf.object(bindings.get(k)).getQueryString()) + .collect(Collectors.joining(" "))); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryEvaluator.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryEvaluator.java new file mode 100644 index 00000000000..37d9b412887 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryEvaluator.java @@ -0,0 +1,66 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php.
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.operation; + +import static org.eclipse.rdf4j.spring.dao.exception.mapper.ExceptionMapper.mapException; +import static org.eclipse.rdf4j.spring.dao.support.operation.OperationUtils.setBindings; + +import java.lang.invoke.MethodHandles; +import java.util.Map; +import java.util.Objects; + +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.TupleQuery; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TupleQueryEvaluator { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final TupleQuery query; + private final Map<String, Value> bindings; + + private TupleQueryEvaluator(TupleQuery query) { + this.query = query; + this.bindings = null; + } + + private TupleQueryEvaluator(TupleQuery query, Map<String, Value> bindings) { + Objects.requireNonNull(query); + Objects.requireNonNull(bindings); + this.query = query; + this.bindings = bindings; + } + + public static TupleQueryEvaluator of(TupleQuery query, Map<String, Value> bindings) { + return new TupleQueryEvaluator(query, bindings); + } + + public static TupleQueryEvaluator of(TupleQuery query) { + return new TupleQueryEvaluator(query); + } + + public TupleQueryResultConverter execute() { + try { + if (this.bindings != null) { + setBindings(query, bindings); + } + return new TupleQueryResultConverter(query.evaluate()); + } catch (Exception e) { + logger.debug("Caught exception while evaluating TupleQuery", e); + throw mapException("Error evaluating TupleQuery", e); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryResultConverter.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryResultConverter.java new file mode 100644 index 00000000000..a65be43693d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryResultConverter.java @@ -0,0 +1,325 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php.
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.operation; + +import static java.util.stream.Collectors.mapping; + +import static org.eclipse.rdf4j.spring.dao.exception.mapper.ExceptionMapper.mapException; +import static org.eclipse.rdf4j.spring.dao.support.operation.OperationUtils.require; + +import java.lang.invoke.MethodHandles; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; +import java.util.Set; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collector; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.spring.dao.support.BindingSetMapper; +import org.eclipse.rdf4j.spring.dao.support.MappingPostProcessor; +import org.eclipse.rdf4j.spring.dao.support.TupleQueryResultMapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TupleQueryResultConverter { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private TupleQueryResult tupleQueryResult; + + public TupleQueryResultConverter(TupleQueryResult result) { + Objects.requireNonNull(result); + this.tupleQueryResult = result; + } + + /** + * Passes the {@link TupleQueryResult} to the consumer and closes the result afterwards. + */ + public void consumeResult(Consumer<TupleQueryResult> consumer) { + try { + consumer.accept(tupleQueryResult); + } catch (Exception e) { + logger.debug("Caught exception while processing TupleQueryResult", e); + throw mapException("Error processing TupleQueryResult", e); + } finally { + tupleQueryResult.close(); + tupleQueryResult = null; + } + } + + /** + * Applies the function to the {@link TupleQueryResult} and closes the result afterwards. + */ + public <T> T applyToResult(Function<TupleQueryResult, T> function) { + try { + return function.apply(tupleQueryResult); + } catch (Exception e) { + logger.warn("Caught exception while processing TupleQueryResult", e); + throw mapException("Error processing TupleQueryResult", e); + } finally { + tupleQueryResult.close(); + tupleQueryResult = null; + } + } + + /** + * Obtains a stream of {@link BindingSet}s. The result is completely consumed and closed when the stream is + * returned. + */ + public Stream<BindingSet> toStream() { + return applyToResult(r -> getBindingStream(r).collect(Collectors.toList()).stream()); + } + + private <T> Stream<T> toStreamInternal(Function<BindingSet, T> mapper) { + return applyToResult( + result -> getBindingStream(result) + .map(mapper) + .filter(Objects::nonNull) + .collect(Collectors.toList()) + .stream()); + } + + /** + * Obtains a {@link Stream} of mapped query results. The result is completely consumed and closed when the stream is + * returned. Any null values are filtered from the resulting stream. + */ + public <T> Stream<T> toStream(BindingSetMapper<T> mapper) { + return toStreamInternal(mapper); + } + + /** + * Obtains a {@link Stream} of mapped query results, using the postprocessor to map each result again. Any null values are + * filtered from the resulting stream. + */ + public <T, O> Stream<O> toStream( + BindingSetMapper<T> mapper, MappingPostProcessor<T, O> postProcessor) { + return toStreamInternal(andThenOrElseNull(mapper, postProcessor)); + } + + /** + * Maps the whole {@link TupleQueryResult} to one object, which may be null.
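+ * <p>Usage sketch (illustrative only, not part of this change; {@code MyEntity} and {@code mapWholeResult} stand for assumed application code implementing a {@link TupleQueryResultMapper}):
+ *
+ * <pre>{@code
+ * MyEntity entity = new TupleQueryResultConverter(tupleQueryResult)
+ *         .toSingletonMaybeOfWholeResult(result -> mapWholeResult(result)); // may be null
+ * }</pre>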
+ */ + public T toSingletonMaybeOfWholeResult(TupleQueryResultMapper mapper) { + return applyToResult(mapper); + } + + /** + * Maps the whole {@link TupleQueryResult} to one {@link Optional}. + */ + public Optional toSingletonOptionalOfWholeResult(TupleQueryResultMapper mapper) { + return Optional.ofNullable(toSingletonMaybeOfWholeResult(mapper)); + } + + /** + * Maps the whole {@link TupleQueryResult} to an object, throwing an exception if the mapper returns + * null. + * + * @throws org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException + */ + public T toSingletonOfWholeResult(TupleQueryResultMapper mapper) { + return require(toSingletonOptionalOfWholeResult(mapper)); + } + + /** + * Maps the first {@link BindingSet} in the result if one exists, throwing an exception if there are more. Returns + * null if there are no results or if there is one result that is mapped to null by the specified mapper. + * + * @throws org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException + */ + public T toSingletonMaybe(BindingSetMapper mapper) { + return mapAndCollect(mapper, OperationUtils.toSingletonMaybe()); + } + + /** + * Maps the first {@link BindingSet} in the result, throwing an exception if there are more than one. Returns an + * Optional, which is empty if there are no results or if there is one result that is mapped to null by the + * specified mapper. + */ + public Optional toSingletonOptional(BindingSetMapper mapper) { + return Optional.ofNullable(toSingletonMaybe(mapper)); + } + + /** + * Maps the first {@link BindingSet} in the result, throwing an exception if there are no results or more than one. + * + * @throws org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException + */ + public T toSingleton(BindingSetMapper mapper) { + return require(toSingletonOptional(mapper)); + } + + /** + * Maps the first {@link BindingSet} in the result if one exists, throwing an exception if there are more. + * + * @throws org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException + */ + public O toSingletonMaybe( + BindingSetMapper mapper, MappingPostProcessor postProcessor) { + return mapAndCollect(andThenOrElseNull(mapper, postProcessor), OperationUtils.toSingletonMaybe()); + } + + public Optional toSingletonOptional( + BindingSetMapper mapper, MappingPostProcessor postProcessor) { + return Optional.ofNullable(toSingletonMaybe(mapper, postProcessor)); + } + + /** + * Maps the first {@link BindingSet} in the result, throwing an exception if there are no results or more than one. + * + * @throws org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException + */ + public O toSingleton( + BindingSetMapper mapper, MappingPostProcessor postProcessor) { + return require(toSingletonOptional(mapper, postProcessor)); + } + + public R mapAndCollect(Function mapper, Collector collector) { + return applyToResult( + result -> getBindingStream(result) + .map(mapper) + .filter(Objects::nonNull) + .collect(collector)); + } + + /** + * Maps the query result to a {@link List}. + */ + public List toList(BindingSetMapper mapper) { + return mapAndCollect(mapper, Collectors.toList()); + } + + /** + * Maps the query result to a {@link List}. + */ + public List toList( + BindingSetMapper mapper, MappingPostProcessor postProcessor) { + return mapAndCollect(andThenOrElseNull(mapper, postProcessor), Collectors.toList()); + } + + /** + * Maps the query result to a {@link Set}. 
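+ * <p>Usage sketch (illustrative only, not part of this change; {@code converter} is an assumed {@code TupleQueryResultConverter} and {@code "iri"} an assumed binding name in the query):
+ *
+ * <pre>{@code
+ * Set<IRI> iris = converter.toSet(bindingSet -> (IRI) bindingSet.getValue("iri"));
+ * }</pre>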
+ */ + public Set toSet(BindingSetMapper mapper) { + return mapAndCollect(mapper, Collectors.toSet()); + } + + /** + * Maps the query result to a {@link Set}. + */ + public Set toSet( + BindingSetMapper mapper, MappingPostProcessor postProcessor) { + return mapAndCollect(andThenOrElseNull(mapper, postProcessor), Collectors.toSet()); + } + + /** + * Maps the query result to a {@link Map}, throwing an Exception if there are multiple values for one key. + */ + public Map toMap( + Function keyMapper, Function valueMapper) { + return mapAndCollect(Function.identity(), Collectors.toMap(keyMapper, valueMapper)); + } + + /** + * Maps the query result to a {@link Map} of {@link Set}s. + */ + public Map> toMapOfSet( + Function keyMapper, Function valueMapper) { + return mapAndCollect( + Function.identity(), + Collectors.groupingBy( + keyMapper, Collectors.mapping(valueMapper, Collectors.toSet()))); + } + + /** + * Maps the query result to a {@link Map} of {@link List}s. + */ + public Map> toMapOfList( + Function keyMapper, Function valueMapper) { + return mapAndCollect( + Function.identity(), + Collectors.groupingBy( + keyMapper, Collectors.mapping(valueMapper, Collectors.toList()))); + } + + /** + * Maps the query result to a {@link Map}, throwing an Exception if there are multiple values for one key. + */ + public Map toMap( + BindingSetMapper mapper, Function keyMapper, Function valueMapper) { + return mapAndCollect(mapper, Collectors.toMap(keyMapper, valueMapper)); + } + + /** + * Maps the query result to a {@link Map}, throwing an Exception if there are multiple values for one key. + */ + public Map toMap(Function> entryMapper) { + return mapAndCollect( + Function.identity(), + Collectors.toMap( + bs -> entryMapper.apply(bs).getKey(), + bs -> entryMapper.apply(bs).getValue())); + } + + /** + * Maps the query result to a {@link Map} of {@link Set}s. + */ + public Map> toMapOfSet( + BindingSetMapper mapper, Function keyMapper, Function valueMapper) { + return mapAndCollect( + mapper, Collectors.groupingBy(keyMapper, mapping(valueMapper, Collectors.toSet()))); + } + + /** + * Maps the query result to a {@link Map} of {@link List}s. + */ + public Map> toMapOfList( + BindingSetMapper mapper, Function keyMapper, Function valueMapper) { + return mapAndCollect( + mapper, + Collectors.groupingBy(keyMapper, mapping(valueMapper, Collectors.toList()))); + } + + /** + * If the result has only one empty binding set, this method returns an empty stream, otherwise the stream of + * BindingSets + */ + public Stream getBindingStream(TupleQueryResult result) { + if (!result.hasNext()) { + return Stream.empty(); + } + BindingSet first = result.next(); + if (!result.hasNext() && first.isEmpty()) { + return Stream.empty(); + } + return Stream.concat(Stream.of(first), result.stream()); + } + + /** + * Executes mapper.andThen(postProcessor) unless the result of mapper is null, in which + * case the result is null. 
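+ * <p>Sketch of the intended semantics (illustrative only; {@code "label"} is an assumed binding name):
+ *
+ * <pre>{@code
+ * Function<BindingSet, String> label =
+ *         andThenOrElseNull(bs -> bs.getValue("label"), Value::stringValue);
+ * // if "label" is unbound, label.apply(bs) yields null rather than throwing
+ * }</pre>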
+ */ + private Function andThenOrElseNull( + BindingSetMapper mapper, MappingPostProcessor postProcessor) { + return bindingSet -> Optional.ofNullable(mapper.apply(bindingSet)) + .map(postProcessor) + .orElse(null); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/sparql/NamedSparqlSupplier.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/sparql/NamedSparqlSupplier.java new file mode 100644 index 00000000000..68ddfd57b4d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/sparql/NamedSparqlSupplier.java @@ -0,0 +1,63 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.sparql; + +import java.util.Objects; +import java.util.function.Supplier; + +import org.eclipse.rdf4j.common.annotation.Experimental; + +/** + * Associates a String key with a {@link Supplier} that provides a SPARQL operation. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +public class NamedSparqlSupplier { + private final String name; + private final Supplier sparqlSupplier; + + public NamedSparqlSupplier(String name, Supplier sparqlSupplier) { + this.name = name; + this.sparqlSupplier = sparqlSupplier; + } + + public String getName() { + return name; + } + + public Supplier getSparqlSupplier() { + return sparqlSupplier; + } + + public static NamedSparqlSupplier of(String key, Supplier generator) { + return new NamedSparqlSupplier(key, generator); + } + + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + NamedSparqlSupplier that = (NamedSparqlSupplier) o; + return Objects.equals(name, that.name); + } + + @Override + public int hashCode() { + return Objects.hash(name); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/sparql/PreparedSparqlManager.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/sparql/PreparedSparqlManager.java new file mode 100644 index 00000000000..d24b2182747 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/dao/support/sparql/PreparedSparqlManager.java @@ -0,0 +1,59 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.sparql; + +import java.lang.invoke.MethodHandles; +import java.util.function.Function; + +import org.apache.commons.collections4.map.LRUMap; +import org.eclipse.rdf4j.query.Operation; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class PreparedSparqlManager { + + private final LRUMap<String, Object> preparedSparqlMap = new LRUMap<>(500, 100); + private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public <T> T get(String sparqlString, RepositoryConnection con, Function<String, T> preparer) { + String key = sparqlString + "@con" + con.hashCode(); + logger.debug("obtaining prepared sparql operation..."); + long startGet = System.currentTimeMillis(); + Object element = preparedSparqlMap.get(key); + T preparedSparql = (T) element; + if (preparedSparql == null) { + logger.debug("\tnot found in prepared operation map, preparing new operation..."); + long start = System.currentTimeMillis(); + try { + preparedSparql = preparer.apply(sparqlString); + } catch (Exception e) { + logger.debug("Error preparing the following query:\n{}", sparqlString); + throw e; + } + long stop = System.currentTimeMillis(); + logger.debug("\tpreparing the operation took {} millis", stop - start); + preparedSparqlMap.put(key, preparedSparql); + } + ((Operation) preparedSparql).clearBindings(); + long endGet = System.currentTimeMillis(); + if (logger.isDebugEnabled()) { + logger.debug("obtaining prepared sparql operation took {} millis", endGet - startGet); + logger.debug("sparql:\n{}", sparqlString); + } + return preparedSparql; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/CachingOperationInstantiator.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/CachingOperationInstantiator.java new file mode 100644 index 00000000000..242e74f7a02 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/CachingOperationInstantiator.java @@ -0,0 +1,148 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php.
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationcache; + +import static org.eclipse.rdf4j.spring.util.RepositoryConnectionWrappingUtils.findWrapper; + +import java.lang.invoke.MethodHandles; +import java.util.Collections; +import java.util.Map; +import java.util.Optional; +import java.util.WeakHashMap; +import java.util.function.Supplier; + +import org.apache.commons.collections4.map.LRUMap; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.Operation; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.resultcache.CachingRepositoryConnection; +import org.eclipse.rdf4j.spring.resultcache.ClearableAwareUpdate; +import org.eclipse.rdf4j.spring.resultcache.ResultCachingGraphQuery; +import org.eclipse.rdf4j.spring.resultcache.ResultCachingTupleQuery; +import org.eclipse.rdf4j.spring.support.DirectOperationInstantiator; +import org.eclipse.rdf4j.spring.util.RepositoryConnectionWrappingUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CachingOperationInstantiator extends DirectOperationInstantiator { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final Map> cachedOperations = Collections + .synchronizedMap(new WeakHashMap<>()); + + @Override + public TupleQuery getTupleQuery( + RepositoryConnection con, + Class owner, + String operationName, + Supplier tupleQueryStringSupplier) { + return cachedOrNewOp( + TupleQuery.class, + con, + owner, + operationName, + () -> getTupleQuery(con, tupleQueryStringSupplier.get())); + } + + @Override + public Update getUpdate( + RepositoryConnection con, + Class owner, + String operationName, + Supplier updateStringSupplier) { + return cachedOrNewOp( + Update.class, + con, + owner, + operationName, + () -> getUpdate(con, updateStringSupplier.get())); + } + + @Override + public GraphQuery getGraphQuery( + RepositoryConnection con, + Class owner, + String operationName, + Supplier graphQueryStringSupplier) { + return cachedOrNewOp( + GraphQuery.class, + con, + owner, + operationName, + () -> getGraphQuery(con, graphQueryStringSupplier.get())); + } + + private T cachedOrNewOp( + Class type, + RepositoryConnection con, + Class owner, + String operationName, + Supplier operationSupplier) { + String key = makeOperationCacheKey(type, owner, operationName); + if (logger.isDebugEnabled()) { + logger.debug( + "Obtaining operation of type {} for owner {} with name {}", + type.getSimpleName(), + owner, + operationName); + } + RepositoryConnection rootConnection = RepositoryConnectionWrappingUtils.findRoot(con); + Map cachedOperationsForConnection = this.cachedOperations.get(rootConnection); + if (cachedOperationsForConnection == null) { + if (logger.isDebugEnabled()) { + logger.debug( + "No operations cached with connection yet, initializing operation cache for connection {}", + rootConnection.hashCode()); + } + cachedOperationsForConnection = new LRUMap<>(200, 10); + this.cachedOperations.put(rootConnection, cachedOperationsForConnection); + } + Operation op = cachedOperationsForConnection.get(key); + if (op == null) { + if (logger.isDebugEnabled()) { + logger.debug("Instantiating operation and caching for future reuse"); + } + op = 
operationSupplier.get(); + cachedOperationsForConnection.put(key, op); + } else { + renewLocalCacheIfPossible(op, con); + if (logger.isDebugEnabled()) { + logger.debug("Reusing cached operation"); + } + } + return (T) op; + } + + private void renewLocalCacheIfPossible(Operation op, RepositoryConnection con) { + Optional wrapperOpt = findWrapper(con, CachingRepositoryConnection.class); + if (wrapperOpt.isPresent()) { + CachingRepositoryConnection cachingCon = wrapperOpt.get(); + if (op instanceof ResultCachingGraphQuery) { + cachingCon.renewLocalResultCache((ResultCachingGraphQuery) op); + } else if (op instanceof ResultCachingTupleQuery) { + cachingCon.renewLocalResultCache((ResultCachingTupleQuery) op); + } else if (op instanceof ClearableAwareUpdate) { + ((ClearableAwareUpdate) op).renewClearable(cachingCon); + } + } + } + + private String makeOperationCacheKey( + Class operationType, Class owner, String name) { + return operationType.getSimpleName() + ":" + owner.getName() + ":" + name; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/OperationCacheConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/OperationCacheConfig.java new file mode 100644 index 00000000000..fa2afc325c7 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/OperationCacheConfig.java @@ -0,0 +1,27 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationcache; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty("rdf4j.spring.operationcache.enabled") +@EnableConfigurationProperties(OperationCacheProperties.class) +public class OperationCacheConfig { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/OperationCacheProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/OperationCacheProperties.java new file mode 100644 index 00000000000..f613b332135 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/OperationCacheProperties.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationcache; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.operationcache") +public class OperationCacheProperties { + private boolean enabled = false; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/package-info.java new file mode 100644 index 00000000000..c02925a6541 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationcache/package-info.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

<h1>Rdf4j-Spring OperationCache</h1>

+ * + * Provides connection-level caching of SPARQL operations. + * + *

+ * To enable, set: rdf4j.spring.operationcache.enabled=true. + * + *
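+ * <p>For example, in a Spring Boot {@code application.properties} (illustrative):
+ *
+ * <pre>
+ * rdf4j.spring.operationcache.enabled=true
+ * </pre>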

+ * If enabled, the {@link org.eclipse.rdf4j.spring.support.RDF4JTemplate Rdf4JTemplate}, set up by + * {@link org.eclipse.rdf4j.spring.RDF4JConfig}, will use the + * {@link org.eclipse.rdf4j.spring.operationcache.CachingOperationInstantiator CachingOperationInstantiator} to generate + * new SPARQL operations instead of the default implementation. + * + * @since 4.0.0 + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring.operationcache; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingGraphQuery.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingGraphQuery.java new file mode 100644 index 00000000000..6552382cba4 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingGraphQuery.java @@ -0,0 +1,44 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.rio.RDFHandler; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.eclipse.rdf4j.spring.support.query.DelegatingGraphQuery; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class LoggingGraphQuery extends DelegatingGraphQuery { + + private final OperationLog operationLog; + + public LoggingGraphQuery(GraphQuery delegate, OperationLog operationLog) { + super(delegate); + this.operationLog = operationLog; + } + + @Override + public GraphQueryResult evaluate() throws QueryEvaluationException { + return operationLog.runWithLog(getDelegate(), () -> getDelegate().evaluate()); + } + + @Override + public void evaluate(RDFHandler handler) throws QueryEvaluationException, RDFHandlerException { + operationLog.runWithLog(getDelegate(), () -> getDelegate().evaluate(handler)); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingRepositoryConnection.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingRepositoryConnection.java new file mode 100644 index 00000000000..eedd29fc5e1 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingRepositoryConnection.java @@ -0,0 +1,294 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.lang.invoke.MethodHandles; +import java.net.URL; + +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.RepositoryResult; +import org.eclipse.rdf4j.repository.base.RepositoryConnectionWrapper; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFParseException; +import org.eclipse.rdf4j.spring.dao.exception.RDF4JSpringException; +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.eclipse.rdf4j.spring.operationlog.log.PseudoOperation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class LoggingRepositoryConnection extends RepositoryConnectionWrapper { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + OperationLog operationLog; + + public LoggingRepositoryConnection(RepositoryConnection delegate, OperationLog operationLog) { + super(delegate.getRepository(), delegate); + this.operationLog = operationLog; + } + + @Override + public TupleQuery prepareTupleQuery(QueryLanguage ql, String queryString, String baseURI) + throws MalformedQueryException, RepositoryException { + logWarning(); + return new LoggingTupleQuery( + getDelegate().prepareTupleQuery(ql, queryString, baseURI), operationLog); + } + + @Override + public TupleQuery prepareTupleQuery(String query) + throws RepositoryException, MalformedQueryException { + logWarning(); + return new LoggingTupleQuery(getDelegate().prepareTupleQuery(query), operationLog); + } + + @Override + public TupleQuery prepareTupleQuery(QueryLanguage ql, String query) + throws RepositoryException, MalformedQueryException { + logWarning(); + return new LoggingTupleQuery(getDelegate().prepareTupleQuery(ql, query), operationLog); + } + + @Override + public GraphQuery prepareGraphQuery(QueryLanguage ql, String queryString, String baseURI) + throws MalformedQueryException, RepositoryException { + logWarning(); + return new LoggingGraphQuery( + getDelegate().prepareGraphQuery(ql, queryString, baseURI), operationLog); + } + + @Override + public GraphQuery prepareGraphQuery(String query) + throws RepositoryException, MalformedQueryException { + logWarning(); + return new LoggingGraphQuery(getDelegate().prepareGraphQuery(query), operationLog); + } + + @Override + public GraphQuery prepareGraphQuery(QueryLanguage ql, String query) + throws RepositoryException, MalformedQueryException { + logWarning(); + return new LoggingGraphQuery(getDelegate().prepareGraphQuery(ql, query), operationLog); + } + + @Override + public Update prepareUpdate(QueryLanguage ql, String updateString, String baseURI) + throws MalformedQueryException, RepositoryException { + 
logWarning(); + return new LoggingUpdate( + getDelegate().prepareUpdate(ql, updateString, baseURI), operationLog); + } + + @Override + public RepositoryResult getStatements( + Resource subj, IRI pred, Value obj, Resource... contexts) throws RepositoryException { + logWarning(); + return operationLog.runWithLog( + PseudoOperation.forGetSatements(subj, pred, obj, contexts), + () -> getDelegate().getStatements(subj, pred, obj, contexts)); + } + + @Override + public void add(RepositoryResult statements, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(statements, contexts), + () -> getDelegate().add(statements, contexts)); + } + + @Override + public void add(File file, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(file, baseURI, dataFormat, contexts), + wrapInRuntimeException( + () -> getDelegate().add(file, baseURI, dataFormat, contexts))); + } + + @Override + public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(in, baseURI, dataFormat, contexts), + wrapInRuntimeException(() -> getDelegate().add(in, baseURI, dataFormat, contexts))); + } + + @Override + public void add(Iterable statements, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(statements, contexts), + () -> getDelegate().add(statements, contexts)); + } + + @Override + public void add( + CloseableIteration statementIter, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(statementIter, contexts), + wrapInRuntimeException(() -> getDelegate().add(statementIter, contexts))); + } + + @Override + public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(reader, baseURI, dataFormat, contexts), + wrapInRuntimeException( + () -> getDelegate().add(reader, baseURI, dataFormat, contexts))); + } + + @Override + public void add(Resource subject, IRI predicate, Value object, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(subject, predicate, object, contexts), + () -> getDelegate().add(subject, predicate, object, contexts)); + } + + @Override + public void add(Statement st, Resource... contexts) throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(st, contexts), () -> getDelegate().add(st, contexts)); + } + + @Override + public void add(URL url, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + operationLog.runWithLog( + PseudoOperation.forAdd(url, baseURI, dataFormat, contexts), + wrapInRuntimeException( + () -> getDelegate().add(url, baseURI, dataFormat, contexts))); + } + + @Override + public void remove(RepositoryResult statements, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forRemove(statements, contexts), + wrapInRuntimeException(() -> getDelegate().remove(statements, contexts))); + } + + @Override + public void remove(Iterable statements, Resource... 
contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forRemove(statements, contexts), + () -> getDelegate().remove(statements, contexts)); + } + + @Override + public void remove( + CloseableIteration statementIter, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forRemove(statementIter, contexts), + wrapInRuntimeException(() -> getDelegate().remove(statementIter, contexts))); + } + + @Override + public void remove(Resource subject, IRI predicate, Value object, Resource... contexts) + throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forRemove(subject, predicate, object, contexts), + () -> getDelegate().remove(subject, predicate, object, contexts)); + } + + @Override + public void remove(Statement st, Resource... contexts) throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forRemove(st, contexts), () -> getDelegate().remove(st, contexts)); + } + + @Override + public void clear(Resource... contexts) throws RepositoryException { + operationLog.runWithLog( + PseudoOperation.forClear(contexts), () -> getDelegate().clear((contexts))); + } + + @Override + public RepositoryResult getStatements( + Resource subj, IRI pred, Value obj, boolean includeInferred, Resource... contexts) + throws RepositoryException { + return operationLog.runWithLog( + PseudoOperation.forGetSatements(subj, pred, obj, includeInferred, contexts), + () -> getDelegate().getStatements(subj, pred, obj, includeInferred, contexts)); + } + + @Override + public boolean hasStatement( + Resource subj, IRI pred, Value obj, boolean includeInferred, Resource... contexts) + throws RepositoryException { + return operationLog.runWithLog( + PseudoOperation.forHasStatement(subj, pred, obj, includeInferred, contexts), + () -> getDelegate().hasStatement(subj, pred, obj, includeInferred, contexts)); + } + + @Override + public boolean hasStatement(Statement st, boolean includeInferred, Resource... contexts) + throws RepositoryException { + return operationLog.runWithLog( + PseudoOperation.forHasStatement(st, includeInferred, contexts), + () -> getDelegate().hasStatement(st, includeInferred, contexts)); + } + + @Override + public long size(Resource... contexts) throws RepositoryException { + return operationLog.runWithLog( + PseudoOperation.forSize(contexts), () -> getDelegate().size(contexts)); + } + + @Override + public void removeNamespace(String prefix) throws RepositoryException { + super.removeNamespace(prefix); + } + + private Runnable wrapInRuntimeException(ExceptionThrowingRunnable task) { + return () -> { + try { + task.run(); + } catch (Exception e) { + throw new RDF4JSpringException(e); + } + }; + } + + private interface ExceptionThrowingRunnable { + void run() throws Exception; + } + + private void logWarning() { + logger.warn( + "rdf4j operations (queries and updates) are being timed and logged. " + + "Don't do this in production as the log is not limited in size! 
" + + "You can disable this feature by setting the configuration property " + + "'org.eclipse.rdf4j.spring.operationlog.enabled' to 'false'"); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingRepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingRepositoryConnectionFactory.java new file mode 100644 index 00000000000..1844872d9ba --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingRepositoryConnectionFactory.java @@ -0,0 +1,41 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.eclipse.rdf4j.spring.support.connectionfactory.DelegatingRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.util.RepositoryConnectionWrappingUtils; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class LoggingRepositoryConnectionFactory extends DelegatingRepositoryConnectionFactory { + + private final OperationLog operationLog; + + public LoggingRepositoryConnectionFactory( + RepositoryConnectionFactory delegate, OperationLog operationLog) { + super(delegate); + this.operationLog = operationLog; + } + + @Override + public RepositoryConnection getConnection() { + return RepositoryConnectionWrappingUtils.wrapOnce( + getDelegate().getConnection(), + con -> new LoggingRepositoryConnection(con, operationLog), + LoggingRepositoryConnection.class); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingTupleQuery.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingTupleQuery.java new file mode 100644 index 00000000000..f06c03cda5e --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingTupleQuery.java @@ -0,0 +1,45 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.TupleQueryResultHandler; +import org.eclipse.rdf4j.query.TupleQueryResultHandlerException; +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.eclipse.rdf4j.spring.support.query.DelegatingTupleQuery; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class LoggingTupleQuery extends DelegatingTupleQuery { + + private final OperationLog operationLog; + + public LoggingTupleQuery(TupleQuery delegate, OperationLog operationLog) { + super(delegate); + this.operationLog = operationLog; + } + + @Override + public TupleQueryResult evaluate() throws QueryEvaluationException { + return operationLog.runWithLog(getDelegate(), () -> getDelegate().evaluate()); + } + + @Override + public void evaluate(TupleQueryResultHandler handler) + throws QueryEvaluationException, TupleQueryResultHandlerException { + operationLog.runWithLog(getDelegate(), () -> getDelegate().evaluate(handler)); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingUpdate.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingUpdate.java new file mode 100644 index 00000000000..573b87a6ee0 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/LoggingUpdate.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.query.UpdateExecutionException; +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.eclipse.rdf4j.spring.support.query.DelegatingUpdate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class LoggingUpdate extends DelegatingUpdate { + + private final OperationLog operationLog; + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public LoggingUpdate(Update delegate, OperationLog operationLog) { + super(delegate); + this.operationLog = operationLog; + } + + @Override + public void execute() throws UpdateExecutionException { + operationLog.runWithLog(getDelegate(), () -> getDelegate().execute()); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/OperationLogConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/OperationLogConfig.java new file mode 100644 index 00000000000..0af50df9655 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/OperationLogConfig.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty(prefix = "rdf4j.spring.operationlog", name = "enabled") +@EnableConfigurationProperties(OperationLogProperties.class) +public class OperationLogConfig { + @Bean + OperationLog getOperationLog() { + return new OperationLog(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/OperationLogProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/OperationLogProperties.java new file mode 100644 index 00000000000..fa3a20f552f --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/OperationLogProperties.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.operationlog") +public class OperationLogProperties { + private boolean enabled = false; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationExecutionStats.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationExecutionStats.java new file mode 100644 index 00000000000..1cb235e49ef --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationExecutionStats.java @@ -0,0 +1,69 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log; + +import org.eclipse.rdf4j.query.Operation; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class OperationExecutionStats { + private final int bindingsHashCode; + private final String operation; + private final long start; + private Long end = null; + private boolean failed = false; + + public OperationExecutionStats(String operation, int bindingsHashCode) { + this.bindingsHashCode = bindingsHashCode; + this.operation = operation; + this.start = System.currentTimeMillis(); + } + + public static OperationExecutionStats of(Operation operation) { + return new OperationExecutionStats( + operation.toString(), operation.getBindings().hashCode()); + } + + public static OperationExecutionStats of(PseudoOperation operation) { + return new OperationExecutionStats(operation.getOperation(), operation.getValuesHash()); + } + + public void operationSuccessful() { + this.end = System.currentTimeMillis(); + } + + public void operationFailed() { + this.end = System.currentTimeMillis(); + this.failed = true; + } + + public String getOperation() { + return operation; + } + + public int getBindingsHashCode() { + return bindingsHashCode; + } + + public long getQueryDuration() { + if (this.end == null) { + throw new IllegalStateException("Cannot calculate duration - end is null"); + } + return end - start; + } + + public boolean isFailed() { + return failed; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationExecutionStatsConsumer.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationExecutionStatsConsumer.java new file mode 100644 index 
00000000000..d0ac852c60b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationExecutionStatsConsumer.java @@ -0,0 +1,20 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface OperationExecutionStatsConsumer { + void consumeOperationExecutionStats(OperationExecutionStats operationExecutionStats); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationLog.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationLog.java new file mode 100644 index 00000000000..4a4c2d8fd08 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/OperationLog.java @@ -0,0 +1,95 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log; + +import java.util.Objects; +import java.util.function.Supplier; + +import org.eclipse.rdf4j.query.Operation; +import org.eclipse.rdf4j.spring.operationlog.log.slf4j.DebuggingOperationExecutionStatsConsumer; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class OperationLog { + + private OperationExecutionStatsConsumer statsConsumer; + + public OperationLog(OperationExecutionStatsConsumer statsConsumer) { + Objects.requireNonNull(statsConsumer); + this.statsConsumer = statsConsumer; + } + + @Autowired(required = false) + public void setStatsConsumer(OperationExecutionStatsConsumer statsConsumer) { + this.statsConsumer = statsConsumer; + } + + public OperationLog() { + this.statsConsumer = new DebuggingOperationExecutionStatsConsumer(); + } + + public void runWithLog(Operation operation, Runnable action) { + runWithLog(makeStats(operation), action); + } + + public T runWithLog(Operation operation, Supplier supplier) { + return runWithLog(makeStats(operation), supplier); + } + + public void runWithLog(PseudoOperation operation, Runnable action) { + runWithLog(makeStats(operation), action); + } + + public T runWithLog(PseudoOperation operation, Supplier supplier) { + return runWithLog(makeStats(operation), supplier); + } + + private OperationExecutionStats makeStats(Operation operation) { + Objects.requireNonNull(operation); + return OperationExecutionStats.of(operation); + } + + private OperationExecutionStats makeStats(PseudoOperation operation) { + Objects.requireNonNull(operation); + return OperationExecutionStats.of(operation); + } + + private void runWithLog(OperationExecutionStats stats, Runnable action) { + Objects.requireNonNull(action); + try { + action.run(); + stats.operationSuccessful(); + } catch (Throwable t) { + stats.operationFailed(); + throw t; + } finally { + statsConsumer.consumeOperationExecutionStats(stats); + } + } + + private T runWithLog(OperationExecutionStats stats, Supplier supplier) { + Objects.requireNonNull(supplier); + try { + T result = supplier.get(); + stats.operationSuccessful(); + return result; + } catch (Throwable t) { + stats.operationFailed(); + throw t; + } finally { + statsConsumer.consumeOperationExecutionStats(stats); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/PseudoOperation.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/PseudoOperation.java new file mode 100644 index 00000000000..44632544f02 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/PseudoOperation.java @@ -0,0 +1,80 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log; + +import static java.util.stream.Collectors.joining; + +import java.util.Arrays; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class PseudoOperation { + private final String operation; + private final int valuesHash; + + private PseudoOperation(String operation, int valuesHash) { + this.operation = operation; + this.valuesHash = valuesHash; + } + + public static PseudoOperation forGetSatements(Object... args) { + return forMethodNameAndArgs("getStatements", args); + } + + public static PseudoOperation forAdd(Object... args) { + String argsString = getArgsString(args); + return forMethodNameAndArgs("add", args); + } + + public static PseudoOperation forRemove(Object... args) { + return forMethodNameAndArgs("remove", args); + } + + public static PseudoOperation forClear(Object... args) { + String argsString = getArgsString(args); + return forMethodNameAndArgs("clear", args); + } + + public static PseudoOperation forHasStatement(Object... args) { + return forMethodNameAndArgs("hasStatement", args); + } + + public static PseudoOperation forMethodNameAndArgs(String methodName, Object... args) { + String argsString = getArgsString(args); + return new PseudoOperation( + "RepositoryConnection." + methodName + "(" + argsString + ")", + Arrays.hashCode(args)); + } + + public static PseudoOperation forSize(Object... args) { + return forMethodNameAndArgs("size", args); + } + + public String getOperation() { + return operation; + } + + public int getValuesHash() { + return valuesHash; + } + + private static String getArgsString(Object[] args) { + if (args == null || args.length == 0) { + return ""; + } + return Arrays.stream(args) + .map(o -> o == null ? "[null]" : o.getClass().getSimpleName()) + .collect(joining(", ")); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/AggregatedOperationStats.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/AggregatedOperationStats.java new file mode 100644 index 00000000000..3b21514662f --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/AggregatedOperationStats.java @@ -0,0 +1,124 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log.jmx; + +import java.util.HashSet; +import java.util.Set; + +import org.eclipse.rdf4j.spring.dao.exception.RDF4JSpringException; +import org.eclipse.rdf4j.spring.operationlog.log.OperationExecutionStats; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class AggregatedOperationStats implements Cloneable { + private String operation = null; + private int count = 0; + private int failed = 0; + private long cumulativeTime = 0; + private Integer uniqueBindingsCount = null; + private Set bindingsHashcodes = new HashSet<>(); + + public AggregatedOperationStats() { + } + + public static AggregatedOperationStats build(OperationExecutionStats stats) { + return new AggregatedOperationStats().buildNext(stats); + } + + @Override + protected Object clone() { + AggregatedOperationStats theClone; + try { + theClone = (AggregatedOperationStats) super.clone(); + } catch (CloneNotSupportedException e) { + throw new RDF4JSpringException("could not clone", e); + } + theClone.operation = this.operation; + theClone.count = this.count; + theClone.failed = this.failed; + theClone.cumulativeTime = this.cumulativeTime; + theClone.uniqueBindingsCount = this.uniqueBindingsCount; + theClone.bindingsHashcodes = new HashSet<>(); + theClone.bindingsHashcodes.addAll(this.bindingsHashcodes); + return theClone; + } + + public void setUniqueBindingsCount(int uniqueBindingsCount) { + this.uniqueBindingsCount = uniqueBindingsCount; + } + + public void setOperation(String operation) { + this.operation = operation; + } + + public void setCount(int count) { + this.count = count; + } + + public void setCumulativeTime(long cumulativeTime) { + this.cumulativeTime = cumulativeTime; + } + + public void setFailed(int failed) { + this.failed = failed; + } + + public long getAverageTime() { + return this.cumulativeTime / this.count; + } + + public String getOperation() { + return operation; + } + + public int getCount() { + return count; + } + + public int getFailed() { + return failed; + } + + public long getCumulativeTime() { + return cumulativeTime; + } + + public int getUniqueBindingsCount() { + return uniqueBindingsCount != null ? uniqueBindingsCount : bindingsHashcodes.size(); + } + + public AggregatedOperationStats buildNext(OperationExecutionStats stats) { + String newOperation = stats.getOperation(); + AggregatedOperationStats newStats; + newStats = (AggregatedOperationStats) this.clone(); + if (newStats.operation != null) { + if (!newStats.operation.equals(newOperation)) { + throw new IllegalArgumentException( + "Cannot add to aggregated stats: operations differ. 
Existing operation:\n" + + newStats.operation + + "\n, new operation:\n" + + newOperation); + } + } else { + newStats.operation = newOperation; + } + newStats.bindingsHashcodes.add(stats.getBindingsHashCode()); + newStats.count += 1; + if (stats.isFailed()) { + newStats.failed += 1; + } + newStats.cumulativeTime += stats.getQueryDuration(); + return newStats; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationLogJmxConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationLogJmxConfig.java new file mode 100644 index 00000000000..3ac2f77e705 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationLogJmxConfig.java @@ -0,0 +1,49 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log.jmx; + +import java.util.Map; + +import org.eclipse.rdf4j.spring.operationlog.log.OperationLog; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.jmx.export.MBeanExporter; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Configuration +@ConditionalOnBean(OperationLog.class) +@ConditionalOnProperty(value = "rdf4j.spring.operationlog.jmx.enabled") +@EnableConfigurationProperties(OperationLogJmxProperties.class) +public class OperationLogJmxConfig { + + @Bean + public OperationStatsBean getOperationStatsBean() { + return new OperationStatsBean(); + } + + @Bean + public MBeanExporter getMBeanExporter(@Autowired OperationStatsBean operationStatsBean) { + MBeanExporter exporter = new MBeanExporter(); + exporter.setBeans( + Map.of( + "org.eclipse.rdf4j.spring.operationlog:name=OperationStats", + operationStatsBean)); + return exporter; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationLogJmxProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationLogJmxProperties.java new file mode 100644 index 00000000000..aca248ba97e --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationLogJmxProperties.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log.jmx; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.operationlog.jmx") +public class OperationLogJmxProperties { + boolean enabled = false; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationStatsBean.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationStatsBean.java new file mode 100644 index 00000000000..9fb0758f7f3 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationStatsBean.java @@ -0,0 +1,102 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log.jmx; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.spring.operationlog.log.OperationExecutionStats; +import org.eclipse.rdf4j.spring.operationlog.log.OperationExecutionStatsConsumer; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class OperationStatsBean implements OperationStatsMXBean, OperationExecutionStatsConsumer { + + private Map stats = new HashMap<>(); + + private final ExecutorService executorService = Executors.newSingleThreadExecutor(); + + @Override + public List getAggregatedOperationStats() { + return stats.values() + .stream() + .sorted( + (l, r) -> { + int cmp = r.getCount() - l.getCount(); + if (cmp != 0) { + return cmp; + } + return (int) (r.getCumulativeTime() - l.getCumulativeTime()); + }) + .collect(Collectors.toList()); + } + + @Override + public int getDistinctOperationCount() { + return stats.size(); + } + + @Override + public int getDistinctOperationExecutionCount() { + return stats.values() + .stream() + .mapToInt(AggregatedOperationStats::getUniqueBindingsCount) + .sum(); + } + + @Override + public int getTotalOperationExecutionCount() { + return stats.values().stream().mapToInt(AggregatedOperationStats::getCount).sum(); + } + + @Override + public long getTotalOperationExecutionTime() { + return stats.values().stream().mapToLong(AggregatedOperationStats::getCumulativeTime).sum(); + } + + @Override + public int getTotalFailedOperationExecutionCount() { + return stats.values().stream().mapToInt(AggregatedOperationStats::getFailed).sum(); + } + + @Override + public void reset() { + executorService.execute( + () -> { + Map old = stats; + stats = new HashMap<>(); + old.clear(); + }); + } + + @Override + public void 
consumeOperationExecutionStats(OperationExecutionStats operationExecutionStats) { + executorService.execute( + () -> { + Map newStats = new HashMap<>(stats); + AggregatedOperationStats aggregated = stats.get(operationExecutionStats.getOperation()); + if (aggregated == null) { + aggregated = AggregatedOperationStats.build(operationExecutionStats); + } else { + aggregated = aggregated.buildNext(operationExecutionStats); + } + newStats.put(operationExecutionStats.getOperation(), aggregated); + stats = newStats; + }); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationStatsMXBean.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationStatsMXBean.java new file mode 100644 index 00000000000..57c3edd94d5 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/jmx/OperationStatsMXBean.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log.jmx; + +import java.util.List; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface OperationStatsMXBean { + void reset(); + + int getDistinctOperationCount(); + + int getDistinctOperationExecutionCount(); + + int getTotalOperationExecutionCount(); + + int getTotalFailedOperationExecutionCount(); + + long getTotalOperationExecutionTime(); + + List getAggregatedOperationStats(); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/slf4j/DebuggingOperationExecutionStatsConsumer.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/slf4j/DebuggingOperationExecutionStatsConsumer.java new file mode 100644 index 00000000000..df893ce5dcb --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/log/slf4j/DebuggingOperationExecutionStatsConsumer.java @@ -0,0 +1,41 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.operationlog.log.slf4j; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.spring.operationlog.log.OperationExecutionStats; +import org.eclipse.rdf4j.spring.operationlog.log.OperationExecutionStatsConsumer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class DebuggingOperationExecutionStatsConsumer implements OperationExecutionStatsConsumer { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public DebuggingOperationExecutionStatsConsumer() { + } + + @Override + public void consumeOperationExecutionStats(OperationExecutionStats operationExecutionStats) { + if (logger.isDebugEnabled()) { + logger.debug( + "query duration: {} millis; bindingshash: {}; query: {}", + operationExecutionStats.getQueryDuration(), + operationExecutionStats.getBindingsHashCode(), + operationExecutionStats.getOperation()); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/package-info.java new file mode 100644 index 00000000000..904a356fb7e --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/operationlog/package-info.java @@ -0,0 +1,38 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

Rdf4j-Spring OperationLog

+ * + * Provides query/update-level logging and timing for SPARQL operations. + * + *
    + *
  • Use the property rdf4j.spring.operationlog.enabled=true to enable, in which case each query is + * logged through slf4j. + *
  • Use the property rdf4j.spring.operationlog.jmx.enabled=true to replace slf4j logging by logging + * using a JMX MXBean, + * org.eclipse.rdf4j.operationlog.OperationStats + *
+ * + *
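To make this concrete, here is a hand-rolled sketch of what the autoconfiguration effectively does when the first of these properties is set. The helper class and the `plainFactory` parameter are hypothetical; only the two constructors used here are defined in this module:

```java
import org.eclipse.rdf4j.spring.operationlog.LoggingRepositoryConnectionFactory;
import org.eclipse.rdf4j.spring.operationlog.log.OperationLog;
import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory;

// Sketch only: wraps an existing connection factory so that every query/update is timed and logged.
// 'plainFactory' stands for whatever RepositoryConnectionFactory the application already uses.
public class OperationLogWiringSketch {

    public static RepositoryConnectionFactory wrapWithLogging(RepositoryConnectionFactory plainFactory) {
        // the no-arg constructor falls back to the slf4j-based DebuggingOperationExecutionStatsConsumer
        OperationLog operationLog = new OperationLog();
        return new LoggingRepositoryConnectionFactory(plainFactory, operationLog);
    }
}
```

In an application this wrapping is performed by the autoconfiguration, as the next paragraph describes.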

+ * If enabled, bean of type {@link org.eclipse.rdf4j.spring.operationlog.log.OperationLog OperationLog} is instantiated + * that can be used to create a {@link org.eclipse.rdf4j.spring.operationlog.LoggingRepositoryConnectionFactory + * LoggingRepositoryConnectionFactory}, wrapping the + * {@link org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory RepositoryConnectionFactory} + * used by the application. This is done using spring-autoconfiguration by {@link org.eclipse.rdf4j.spring.RDF4JConfig + * Rdf4JConfig}. + * + * @since 4.0.0 + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring.operationlog; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/package-info.java new file mode 100644 index 00000000000..a181ffa8f0b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/package-info.java @@ -0,0 +1,69 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

Rdf4J-Spring

+ * + * Configures beans for Rdf4J access. Uses Spring's autoconfiguration mechanism at startup to determine which subsystems + * to use. The following example shows a spring configuration class enabling all features and using an in-memory + * repository. The DAO classes (subclasses of {@link org.eclipse.rdf4j.spring.dao.RDF4JDao Rdf4JDao}), assumed to be + * under com.example.your.app + * , are autodetected. + * + *
+ *
+ * @Configuration
+ * @Import(Rdf4JConfig.class)
+ * @ComponentScan(
+ *         value = "com.example.your.app",
+ *         includeFilters =
+ *                 @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = Rdf4JDao.class)
+ * @TestPropertySource(
+ *         properties = {
+ *             "rdf4j.spring.repository.inmemory.enabled=true",
+ *             "rdf4j.spring.pool.enabled=true",
+ *             "rdf4j.spring.operationcache.enabled=true",
+ *             "rdf4j.spring.operationlog.enabled=true",
+ *             "rdf4j.spring.resultcache.enabled=true",
+ *             "rdf4j.spring.tx.enabled=true",
+ *         })
+ * public class Rdf4JStorageConfiguration {
+ *
+ *     // beans, if any (you may not need any - all your DAOs could be autodetected and all other beans may
+ *     // be configured elsewhere)
+ *
+ * }
+ * 
+ * + *
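As a usage sketch to go with the configuration above, a component could have the resulting RepositoryConnectionFactory injected and query through it. The class below is hypothetical and relies only on the factory's getConnection() method plus the standard RDF4J query API; real applications would more likely go through the rdf4j-spring DAO support:

```java
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory;
import org.springframework.stereotype.Component;

// Hypothetical component: counts the triples in the configured repository.
@Component
public class TripleCounter {

    private final RepositoryConnectionFactory connectionFactory;

    public TripleCounter(RepositoryConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
    }

    public long countTriples() {
        // connection lifecycle (closing / returning to a pool) is assumed to be handled
        // by the surrounding rdf4j-spring transaction/pool support
        RepositoryConnection con = connectionFactory.getConnection();
        try (TupleQueryResult result = con.prepareTupleQuery(
                "SELECT (COUNT(*) AS ?cnt) WHERE { ?s ?p ?o }").evaluate()) {
            return Long.parseLong(result.next().getValue("cnt").stringValue());
        }
    }
}
```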

+ * For more information on the subsystems, please refer to their package-infos: + * + *

    + *
  • {@link org.eclipse.rdf4j.spring.operationcache Rdf4J-Spring OperationCache} + *
  • {@link org.eclipse.rdf4j.spring.operationlog Rdf4J-Spring OperationLog} + *
  • {@link org.eclipse.rdf4j.spring.pool Rdf4J-Spring Pool} + *
  • {@link org.eclipse.rdf4j.spring.repository Rdf4J-Spring Repository} + *
  • {@link org.eclipse.rdf4j.spring.resultcache Rdf4J-Spring ResultCache} + *
  • {@link org.eclipse.rdf4j.spring.tx Rdf4J-Spring Tx} + *
+ *

+ * + *

This software has been developed in the project 'BIM-Interoperables Merkmalservice', funded by the Austrian Research + * Promotion Agency and Österreichische Bautechnik Veranstaltungs GmbH. + *

+ * + * @since 4.0.0 + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PoolConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PoolConfig.java new file mode 100644 index 00000000000..c621646082f --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PoolConfig.java @@ -0,0 +1,27 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.pool; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty(prefix = "rdf4j.spring.pool", name = "enabled") +@EnableConfigurationProperties(PoolProperties.class) +public class PoolConfig { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PoolProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PoolProperties.java new file mode 100644 index 00000000000..2a5f4a3f31d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PoolProperties.java @@ -0,0 +1,82 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.pool; + +import java.time.Duration; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.pool") +public class PoolProperties { + private boolean enabled = false; + /** + * Maximum number of connections pooled. + */ + private int maxConnections = 20; + + /** + * Minimum number of connections held idle. + */ + private int minIdleConnections = 5; + /** + * Duration (e.g. 30s) between checks for stale connecitons. + */ + private Duration timeBetweenEvictionRuns = Duration.ofSeconds(30); + /** + * Should the pool actively test connections using a SPARQL statement? 
+ */ + private boolean testWhileIdle = true; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + public int getMaxConnections() { + return maxConnections; + } + + public void setMaxConnections(int maxConnections) { + this.maxConnections = maxConnections; + } + + public int getMinIdleConnections() { + return minIdleConnections; + } + + public void setMinIdleConnections(int minIdleConnections) { + this.minIdleConnections = minIdleConnections; + } + + public Duration getTimeBetweenEvictionRuns() { + return timeBetweenEvictionRuns; + } + + public void setTimeBetweenEvictionRuns(Duration timeBetweenEvictionRuns) { + this.timeBetweenEvictionRuns = timeBetweenEvictionRuns; + } + + public boolean isTestWhileIdle() { + return testWhileIdle; + } + + public void setTestWhileIdle(boolean testWhileIdle) { + this.testWhileIdle = testWhileIdle; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledConnectionObjectFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledConnectionObjectFactory.java new file mode 100644 index 00000000000..fb7fe83d7b7 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledConnectionObjectFactory.java @@ -0,0 +1,85 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.pool; + +import java.lang.invoke.MethodHandles; + +import org.apache.commons.pool2.BasePooledObjectFactory; +import org.apache.commons.pool2.ObjectPool; +import org.apache.commons.pool2.PooledObject; +import org.apache.commons.pool2.impl.DefaultPooledObject; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.tx.exception.RepositoryConnectionPoolException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Obtains connections from the delegate factory and manages them in the object pool. 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +class PooledConnectionObjectFactory extends BasePooledObjectFactory { + private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private ObjectPool pool; + private final RepositoryConnectionFactory delegate; + + public PooledConnectionObjectFactory(RepositoryConnectionFactory delegate) { + this.delegate = delegate; + } + + @Override + public RepositoryConnection create() throws Exception { + logger.debug( + "Creating pooled connection - obtaining underlying connection from delegate factory"); + try { + return delegate.getConnection(); + } catch (Exception e) { + throw new RepositoryConnectionPoolException( + "Error obtaining RepositoryConnection for pool", e); + } + } + + @Override + public PooledObject wrap(RepositoryConnection con) { + return new DefaultPooledObject<>(new PooledRepositoryConnection(con, pool)); + } + + public void setPool(ObjectPool pool) { + this.pool = pool; + } + + @Override + public void destroyObject(PooledObject pooledObject) throws Exception { + logger.debug("destroying pooled connection - closing underlying connection"); + try { + pooledObject.getObject().close(); + logger.debug("successfully closed underlying connection"); + } catch (Exception e) { + throw new RepositoryConnectionPoolException("Error closing RepositoryConnection", e); + } + } + + @Override + public boolean validateObject(PooledObject p) { + RepositoryConnection con = p.getObject(); + try { + con.prepareTupleQuery("select (1 as ?one) where {}").evaluate().close(); + } catch (Exception e) { + logger.info("Test query on pooled connection caused exception - it will be destroyed"); + return false; + } + logger.debug("pooled connection still works"); + return true; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledRepositoryConnection.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledRepositoryConnection.java new file mode 100644 index 00000000000..4bec0d0e5fc --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledRepositoryConnection.java @@ -0,0 +1,54 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.pool; + +import java.lang.invoke.MethodHandles; +import java.util.Objects; + +import org.apache.commons.pool2.ObjectPool; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.base.RepositoryConnectionWrapper; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class PooledRepositoryConnection extends RepositoryConnectionWrapper { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final RepositoryConnection delegate; + + private final ObjectPool pool; + + public PooledRepositoryConnection( + RepositoryConnection delegate, ObjectPool pool) { + super(delegate.getRepository(), delegate); + Objects.requireNonNull(delegate); + Objects.requireNonNull(pool); + this.delegate = delegate; + this.pool = pool; + } + + @Override + public void close() throws RepositoryException { + logger.debug("Close called on pooled RepositoryConnection, returning it to pool"); + try { + pool.returnObject(this); + } catch (Exception e) { + throw new RepositoryException("Error returning connection to pool", e); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledRepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledRepositoryConnectionFactory.java new file mode 100644 index 00000000000..d9f8d16b012 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/PooledRepositoryConnectionFactory.java @@ -0,0 +1,72 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.pool; + +import java.lang.invoke.MethodHandles; + +import org.apache.commons.pool2.ObjectPool; +import org.apache.commons.pool2.impl.GenericObjectPool; +import org.apache.commons.pool2.impl.GenericObjectPoolConfig; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.tx.exception.RepositoryConnectionPoolException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.DisposableBean; + +/** + * Uses the delegate factory to actually obtain connections and provides these connections, managing an internal pool. 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class PooledRepositoryConnectionFactory + implements DisposableBean, RepositoryConnectionFactory { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final ObjectPool pool; + + public PooledRepositoryConnectionFactory( + RepositoryConnectionFactory delegateFactory, + GenericObjectPoolConfig config) { + + PooledConnectionObjectFactory factory = new PooledConnectionObjectFactory(delegateFactory); + if (config == null) { + this.pool = new GenericObjectPool<>(factory); + } else { + this.pool = new GenericObjectPool<>(factory, config); + } + factory.setPool(pool); + } + + public PooledRepositoryConnectionFactory(RepositoryConnectionFactory delegateFactory) { + this(delegateFactory, null); + } + + @Override + public void destroy() throws Exception { + logger.info("shutting down RepositoryConnection pool..."); + pool.close(); + logger.info("\tdone"); + } + + @Override + public RepositoryConnection getConnection() { + try { + return pool.borrowObject(); + } catch (Exception e) { + throw new RepositoryConnectionPoolException( + "Cannot obtain RepositoryConnection from pool", e); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/package-info.java new file mode 100644 index 00000000000..8ad9d475fd8 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/pool/package-info.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

Rdf4j-Spring Pool

+ * + * Provides pooling of {@link org.eclipse.rdf4j.repository.RepositoryConnection RepositoryConnection}s. + * + *

+ * Enable via rdf4j.spring.pool.enabled=true. + * + *

+ * If enabled, the {@link org.eclipse.rdf4j.spring.RDF4JConfig Rdf4JConfig} will wrap its + * {@link org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory RepositoryConnectionFactory} in + * a {@link org.eclipse.rdf4j.spring.pool.PooledRepositoryConnectionFactory PooledRepositoryConnectionFactory}. + * + *
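Written out by hand, that wrapping corresponds roughly to the following sketch. Assumptions are hedged: the GenericObjectPoolConfig type parameter and the relaxed-binding property names in the comments are not spelled out in this diff, and `plainFactory` is a placeholder for the application's existing factory:

```java
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.spring.pool.PooledRepositoryConnectionFactory;
import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory;

// Sketch only: pools connections obtained from an existing factory.
public class PoolWiringSketch {

    public static RepositoryConnectionFactory wrapWithPool(RepositoryConnectionFactory plainFactory) {
        GenericObjectPoolConfig<RepositoryConnection> poolConfig = new GenericObjectPoolConfig<>();
        poolConfig.setMaxTotal(20);        // likely corresponds to rdf4j.spring.pool.max-connections
        poolConfig.setMinIdle(5);          // likely corresponds to rdf4j.spring.pool.min-idle-connections
        poolConfig.setTestWhileIdle(true); // validate idle connections with a test query
        return new PooledRepositoryConnectionFactory(plainFactory, poolConfig);
    }
}
```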

+ * For more information on configuration of the pool, see {@link org.eclipse.rdf4j.spring.pool.PoolProperties + * PoolProperties}. + * + * @since 4.0.0 + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring.pool; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/inmemory/InMemoryRepositoryConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/inmemory/InMemoryRepositoryConfig.java new file mode 100644 index 00000000000..c722611dd55 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/inmemory/InMemoryRepositoryConfig.java @@ -0,0 +1,43 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.repository.inmemory; + +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.memory.MemoryStore; +import org.eclipse.rdf4j.sail.shacl.ShaclSail; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Gabriel Pickl + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@EnableConfigurationProperties(InMemoryRepositoryProperties.class) +@ConditionalOnProperty("rdf4j.spring.repository.inmemory.enabled") +public class InMemoryRepositoryConfig { + @Bean + public Repository getInMemoryRepository( + @Autowired InMemoryRepositoryProperties repositoryProperties) { + if (repositoryProperties.isUseShaclSail()) { + return new SailRepository(new ShaclSail(new MemoryStore())); + } else { + return new SailRepository(new MemoryStore()); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/inmemory/InMemoryRepositoryProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/inmemory/InMemoryRepositoryProperties.java new file mode 100644 index 00000000000..98bb8106bd2 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/inmemory/InMemoryRepositoryProperties.java @@ -0,0 +1,44 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.repository.inmemory; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Gabriel Pickl + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.repository.inmemory") +public class InMemoryRepositoryProperties { + private boolean enabled = true; + /** + * Should a SHACL Sail be used? + */ + private boolean useShaclSail = false; + + public boolean isUseShaclSail() { + return useShaclSail; + } + + public void setUseShaclSail(boolean useShaclSail) { + this.useShaclSail = useShaclSail; + } + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/package-info.java new file mode 100644 index 00000000000..b45e09188e4 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/package-info.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

Rdf4j-Spring Repository

+ * + * Automatically configures {@link org.eclipse.rdf4j.repository.Repository Repository} beans via + * {@link org.eclipse.rdf4j.spring.RDF4JConfig Rdf4JConfig}. + * + *

+ * + * To configure a remote repository, use + * + *

    + *
  • rdf4j.spring.repository.remote.manager-url=[manager-url] + *
  • rdf4j.spring.repository.remote.name=[name] + *
+ * + * (see {@link org.eclipse.rdf4j.spring.repository.remote.RemoteRepositoryProperties RemoteRepositoryProperties}) + * + *

+ * To configure an in-memory Repository use rdf4j.spring.repository.inmemory.enabled=true + * (see {@link org.eclipse.rdf4j.spring.repository.inmemory.InMemoryRepositoryProperties + * InMemoryRepositoryProperties}) + * + *
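+ *
+ * For example, an application.properties using the property names above might look like this (the
+ * manager URL and repository name below are illustrative placeholders, not defaults):
+ *
+ *   rdf4j.spring.repository.remote.manager-url=http://localhost:8080/rdf4j-server
+ *   rdf4j.spring.repository.remote.name=my-repository
+ *
+ * or, for an in-memory repository:
+ *
+ *   rdf4j.spring.repository.inmemory.enabled=true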

+ * Note: Exactly one repository has to be configured. + * + * @since 4.0.0 + * @author Gabriel Pickl + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring.repository; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryConfig.java new file mode 100644 index 00000000000..41746f41e71 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryConfig.java @@ -0,0 +1,66 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.repository.remote; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.manager.RemoteRepositoryManager; +import org.eclipse.rdf4j.spring.support.ConfigurationException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Gabriel Pickl + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@EnableConfigurationProperties(RemoteRepositoryProperties.class) +@ConditionalOnProperty("rdf4j.spring.repository.remote.manager-url") +public class RemoteRepositoryConfig { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Bean + public Repository getRemoteRepository( + @Autowired RemoteRepositoryProperties repositoryProperties) { + Repository repository; + logger.info("Using these repository properties: {}", repositoryProperties); + try { + RemoteRepositoryManager repositoryManager = new RemoteRepositoryManager( + repositoryProperties.getManagerUrl()); + + if (repositoryProperties.isUsernamePasswordConfigured()) { + logger.debug("Set username: {} and password: ****", repositoryProperties.getUsername()); + repositoryManager.setUsernameAndPassword(repositoryProperties.getUsername(), + repositoryProperties.getPassword()); + } + + repositoryManager.init(); + repository = repositoryManager.getRepository(repositoryProperties.getName()); + logger.debug("Successfully initialized repository config: {}", repositoryProperties); + return repository; + } catch (Exception e) { + throw new ConfigurationException( + String.format( + "Unable to retrieve repository for repository config %s", + repositoryProperties), + e); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryProperties.java 
b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryProperties.java new file mode 100644 index 00000000000..ee7d2c17d28 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryProperties.java @@ -0,0 +1,96 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.repository.remote; + +import org.hibernate.validator.constraints.Length; +import org.springframework.boot.context.properties.ConfigurationProperties; + +import jakarta.validation.constraints.NotBlank; +import jakarta.validation.constraints.Pattern; + +/** + * @author Gabriel Pickl + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.repository.remote") +public class RemoteRepositoryProperties { + + /** + * URL of the SPARQL endpoint + */ + @NotBlank + @Pattern(regexp = "^(https?|ftp|file)://[-a-zA-Z0-9+&@#/%?=~_|!:,.;]*[-a-zA-Z0-9+&@#/%=~_|]") + private String managerUrl = null; + + /** + * Optional username of the SPARQL endpoint + */ + private String username = null; + + /** + * Optional password of the SPARQL endpoint + */ + private String password = null; + + /** + * Name of the repository + */ + @NotBlank + @Length(min = 1) + private String name = null; + + public String getManagerUrl() { + return managerUrl; + } + + public void setManagerUrl(String managerUrl) { + this.managerUrl = managerUrl; + } + + public String getUsername() { + return username; + } + + public void setUsername(String username) { + this.username = username; + } + + public String getPassword() { + return password; + } + + public void setPassword(String password) { + this.password = password; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public boolean isUsernamePasswordConfigured() { + return username != null && password != null; + } + + @Override + public String toString() { + return "RemoteRepositoryConfig{" + + "managerUrl='" + managerUrl + "'" + + (username != null ? ", username='" + username + "'" : "") + + (password != null ? ", password='****'" : "") + + ", name='" + name + "' }"; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachedGraphQueryResult.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachedGraphQueryResult.java new file mode 100644 index 00000000000..44cece194a3 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachedGraphQueryResult.java @@ -0,0 +1,91 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.function.Consumer; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.QueryEvaluationException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CachedGraphQueryResult implements GraphQueryResult { + private final List statements; + private Iterator replayingIterator; + private final Map namespaces; + + CachedGraphQueryResult(List statements, Map namespaces) { + this.statements = new ArrayList<>(statements); + this.namespaces = new HashMap<>(); + this.namespaces.putAll(namespaces); + this.replayingIterator = statements.iterator(); + } + + @Override + public Map getNamespaces() throws QueryEvaluationException { + return namespaces; + } + + @Override + public Iterator iterator() { + return replayingIterator; + } + + @Override + public void close() throws QueryEvaluationException { + this.replayingIterator = null; + } + + @Override + public boolean hasNext() throws QueryEvaluationException { + return replayingIterator.hasNext(); + } + + @Override + public Statement next() throws QueryEvaluationException { + return this.replayingIterator.next(); + } + + @Override + public void remove() throws QueryEvaluationException { + throw new UnsupportedOperationException("Remove is not supported"); + } + + @Override + public Stream stream() { + return StreamSupport.stream( + Spliterators.spliteratorUnknownSize(this.replayingIterator, Spliterator.ORDERED), + false); + } + + @Override + public void forEach(Consumer action) { + statements.forEach(action); + } + + @Override + public Spliterator spliterator() { + return Spliterators.spliteratorUnknownSize(iterator(), Spliterator.ORDERED); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachedTupleQueryResult.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachedTupleQueryResult.java new file mode 100644 index 00000000000..80b470c4084 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachedTupleQueryResult.java @@ -0,0 +1,89 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.util.ArrayList; +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.function.Consumer; +import java.util.stream.Stream; +import java.util.stream.StreamSupport; + +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.TupleQueryResult; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CachedTupleQueryResult implements TupleQueryResult { + private final List bindingSets; + private Iterator replayingIterator; + private final List bindingNames; + + CachedTupleQueryResult(List bindingSets, List bindingNames) { + this.bindingSets = new LinkedList<>(bindingSets); + this.bindingNames = new ArrayList<>(bindingNames); + this.replayingIterator = bindingSets.iterator(); + } + + @Override + public List getBindingNames() throws QueryEvaluationException { + return bindingNames; + } + + @Override + public Iterator iterator() { + return replayingIterator; + } + + @Override + public void close() throws QueryEvaluationException { + this.replayingIterator = null; + } + + @Override + public boolean hasNext() throws QueryEvaluationException { + return replayingIterator.hasNext(); + } + + @Override + public BindingSet next() throws QueryEvaluationException { + return this.replayingIterator.next(); + } + + @Override + public void remove() throws QueryEvaluationException { + throw new UnsupportedOperationException("Remove is not supported"); + } + + @Override + public Stream stream() { + return StreamSupport.stream( + Spliterators.spliteratorUnknownSize(this.replayingIterator, Spliterator.ORDERED), + false); + } + + @Override + public void forEach(Consumer action) { + bindingSets.forEach(action); + } + + @Override + public Spliterator spliterator() { + return Spliterators.spliteratorUnknownSize(iterator(), Spliterator.ORDERED); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachingRepositoryConnection.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachingRepositoryConnection.java new file mode 100644 index 00000000000..fc160629643 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachingRepositoryConnection.java @@ -0,0 +1,228 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.net.URL; + +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.base.RepositoryConnectionWrapper; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFParseException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CachingRepositoryConnection extends RepositoryConnectionWrapper implements Clearable { + private final LRUResultCache localTupleQueryResultCache; + private final LRUResultCache localGraphQueryResultCache; + private final LRUResultCache globalTupleQueryResultCache; + private final LRUResultCache globalGraphQueryResultCache; + private final ResultCacheProperties properties; + private boolean clearGlobalResultCacheOnClose = false; + + public CachingRepositoryConnection( + RepositoryConnection delegate, + LRUResultCache globalTupleQueryResultCache, + LRUResultCache globalGraphQueryResultCache, + ResultCacheProperties properties) { + super(delegate.getRepository(), delegate); + this.globalGraphQueryResultCache = globalGraphQueryResultCache; + this.globalTupleQueryResultCache = globalTupleQueryResultCache; + this.localGraphQueryResultCache = new LRUResultCache<>(properties); + this.localTupleQueryResultCache = new LRUResultCache<>(properties); + this.properties = properties; + } + + private Integer makeCacheKey(QueryLanguage ql, String query, String baseURI) { + return (ql.toString() + query + baseURI).hashCode(); + } + + public void renewLocalResultCache(ResultCachingTupleQuery resultCachingTupleQuery) { + resultCachingTupleQuery.renewLocalResultCache(this.localTupleQueryResultCache); + } + + public void renewLocalResultCache(ResultCachingGraphQuery resultCachingGraphQuery) { + resultCachingGraphQuery.renewLocalResultCache(this.localGraphQueryResultCache); + } + + public void renewClearable(ClearableAwareUpdate update) { + update.renewClearable(this); + } + + @Override + public TupleQuery prepareTupleQuery(QueryLanguage ql, String queryString, String baseURI) + throws MalformedQueryException, RepositoryException { + return new ResultCachingTupleQuery( + getDelegate().prepareTupleQuery(ql, queryString, baseURI), + this.localTupleQueryResultCache, + this.globalTupleQueryResultCache, + properties); + } + + @Override + public GraphQuery prepareGraphQuery(QueryLanguage ql, String queryString, String baseURI) + throws MalformedQueryException, RepositoryException { + return new ResultCachingGraphQuery( + getDelegate().prepareGraphQuery(ql, queryString, baseURI), + this.localGraphQueryResultCache, + this.globalGraphQueryResultCache, + this.properties); + } + + @Override + public Update prepareUpdate(QueryLanguage ql, String updateString, String baseURI) + throws MalformedQueryException, RepositoryException { 
+ return new ClearableAwareUpdate( + getDelegate().prepareUpdate(ql, updateString, baseURI), this); + } + + @Override + public void close() throws RepositoryException { + this.localGraphQueryResultCache.markDirty(); + this.localTupleQueryResultCache.markDirty(); + if (this.clearGlobalResultCacheOnClose) { + this.globalGraphQueryResultCache.markDirty(); + this.globalTupleQueryResultCache.markDirty(); + } + super.close(); + } + + /** + * As we are changing the repository's content, we have to reset all caches (even though it + */ + @Override + public void markDirty() { + this.localGraphQueryResultCache.markDirty(); + this.localTupleQueryResultCache.markDirty(); + this.globalTupleQueryResultCache.bypassForCurrentThread(); + this.globalGraphQueryResultCache.bypassForCurrentThread(); + this.clearGlobalResultCacheOnClose = true; + } + + @Override + public void clearCachedResults() { + this.localGraphQueryResultCache.clearCachedResults(); + this.localTupleQueryResultCache.clearCachedResults(); + } + + @Override + public void add(File file, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + super.add(file, baseURI, dataFormat, contexts); + markDirty(); + } + + @Override + public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + super.add(in, baseURI, dataFormat, contexts); + markDirty(); + } + + @Override + public void add(Iterable statements, Resource... contexts) + throws RepositoryException { + super.add(statements, contexts); + markDirty(); + } + + @Override + public void add( + CloseableIteration statementIter, Resource... contexts) + throws RepositoryException { + super.add(statementIter, contexts); + markDirty(); + } + + @Override + public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + super.add(reader, baseURI, dataFormat, contexts); + markDirty(); + } + + @Override + public void add(Resource subject, IRI predicate, Value object, Resource... contexts) + throws RepositoryException { + super.add(subject, predicate, object, contexts); + markDirty(); + } + + @Override + public void add(Statement st, Resource... contexts) throws RepositoryException { + super.add(st, contexts); + markDirty(); + } + + @Override + public void add(URL url, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + super.add(url, baseURI, dataFormat, contexts); + markDirty(); + } + + @Override + public void clear(Resource... contexts) throws RepositoryException { + super.clear(contexts); + markDirty(); + } + + @Override + public void remove(Iterable statements, Resource... contexts) + throws RepositoryException { + super.remove(statements, contexts); + markDirty(); + } + + @Override + public void remove( + CloseableIteration statementIter, Resource... contexts) + throws RepositoryException { + super.remove(statementIter, contexts); + markDirty(); + } + + @Override + public void remove(Resource subject, IRI predicate, Value object, Resource... contexts) + throws RepositoryException { + super.remove(subject, predicate, object, contexts); + markDirty(); + } + + @Override + public void remove(Statement st, Resource... 
contexts) throws RepositoryException { + super.remove(st, contexts); + markDirty(); + } + + @Override + public void removeNamespace(String prefix) throws RepositoryException { + super.removeNamespace(prefix); + markDirty(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachingRepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachingRepositoryConnectionFactory.java new file mode 100644 index 00000000000..b031ceb35af --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/CachingRepositoryConnectionFactory.java @@ -0,0 +1,48 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.support.connectionfactory.DelegatingRepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.util.RepositoryConnectionWrappingUtils; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CachingRepositoryConnectionFactory extends DelegatingRepositoryConnectionFactory { + public CachingRepositoryConnectionFactory( + RepositoryConnectionFactory delegate, ResultCacheProperties properties) { + super(delegate); + this.properties = properties; + this.globalGraphQueryResultCache = new LRUResultCache<>(properties); + this.globalTupleQueryResultCache = new LRUResultCache<>(properties); + } + + private final LRUResultCache globalTupleQueryResultCache; + private final LRUResultCache globalGraphQueryResultCache; + + private final ResultCacheProperties properties; + + @Override + public RepositoryConnection getConnection() { + return RepositoryConnectionWrappingUtils.wrapOnce( + getDelegate().getConnection(), + con -> new CachingRepositoryConnection( + con, + globalTupleQueryResultCache, + globalGraphQueryResultCache, + properties), + CachingRepositoryConnection.class); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/Clearable.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/Clearable.java new file mode 100644 index 00000000000..3d921fb800f --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/Clearable.java @@ -0,0 +1,22 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface Clearable { + void markDirty(); + + void clearCachedResults(); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ClearableAwareUpdate.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ClearableAwareUpdate.java new file mode 100644 index 00000000000..f6350e4c3c0 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ClearableAwareUpdate.java @@ -0,0 +1,59 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.lang.invoke.MethodHandles; +import java.lang.ref.WeakReference; + +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.query.UpdateExecutionException; +import org.eclipse.rdf4j.spring.support.query.DelegatingUpdate; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Update that, upon each invocation of execute(), clears the result cache it is aware of. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ClearableAwareUpdate extends DelegatingUpdate { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + WeakReference clearableRef; + + public ClearableAwareUpdate(Update delegate, Clearable clearable) { + super(delegate); + this.clearableRef = new WeakReference<>(clearable); + } + + @Override + public void execute() throws UpdateExecutionException { + super.execute(); + Clearable clearable = clearableRef.get(); + if (clearable == null) { + logger.debug( + "update executed, but reference to clearable (i.e. result-caching connection) is gone - cannot mark it dirty"); + return; + } + if (logger.isDebugEnabled()) { + logger.debug( + "marking Dirty: instance {} of type {}", + hashCode(), + clearable.getClass().getSimpleName()); + } + clearable.markDirty(); + } + + public void renewClearable(Clearable clearable) { + this.clearableRef = new WeakReference<>(clearable); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/LRUResultCache.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/LRUResultCache.java new file mode 100644 index 00000000000..060fd5c73cc --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/LRUResultCache.java @@ -0,0 +1,137 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.lang.invoke.MethodHandles; +import java.time.Duration; +import java.time.Instant; +import java.util.Collections; +import java.util.Map; +import java.util.Objects; +import java.util.WeakHashMap; +import java.util.concurrent.atomic.AtomicBoolean; + +import org.apache.commons.collections4.map.LRUMap; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @param + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class LRUResultCache implements ResultCache { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final Map> cache; + private final AtomicBoolean dirty = new AtomicBoolean(false); + private final Map bypassInThread = Collections.synchronizedMap(new WeakHashMap<>()); + private final Duration entryLifetime; + + public LRUResultCache(ResultCacheProperties properties) { + this.entryLifetime = properties.getEntryLifetime(); + this.cache = Collections.synchronizedMap( + new LRUMap<>(properties.getMaxSize(), properties.getInitialSize())); + } + + @Override + public T get(Integer key) { + debug("obtaining cached result for key {} from cache {}", key, hashCode()); + Objects.requireNonNull(key); + if (dirty.get()) { + debug("cache is dirty"); + clearCachedResults(); + debug("returning null"); + return null; + } + if (isBypass()) { + debug("bypassing cache, returning null"); + return null; + } + Entry entry = cache.get(key); + if (entry == null) { + debug("nothing found in cache, returning null"); + return null; + } + if (entry.isExpired()) { + cache.remove(key); + debug("cached object is expired, returning null"); + return null; + } + debug("returning cached object"); + return entry.getCachedObject(); + } + + private void debug(String message, Object... 
args) { + if (logger.isDebugEnabled()) { + logger.debug(message, args); + } + } + + private boolean isBypass() { + return bypassInThread.containsKey(Thread.currentThread()); + } + + @Override + public void put(Integer key, T cachedObject) { + Objects.requireNonNull(key); + Objects.requireNonNull(cachedObject); + debug("about to put object {} into cache {}", key, hashCode()); + if (isBypass()) { + debug("bypassing cache, not caching object"); + return; + } + if (dirty.get()) { + debug("cache is dirty"); + clearCachedResults(); + } + debug("putting object into cache"); + cache.put(key, new Entry<>(cachedObject)); + } + + @Override + public void markDirty() { + debug("marking dirty: cache {}", hashCode()); + this.dirty.set(true); + } + + @Override + public synchronized void clearCachedResults() { + debug("clearing cache {}", hashCode()); + if (dirty.get()) { + cache.clear(); + bypassInThread.clear(); + dirty.set(false); + } + } + + @Override + public void bypassForCurrentThread() { + bypassInThread.put(Thread.currentThread(), true); + } + + private class Entry { + E cachedObject; + Instant createdAtTimestamp = Instant.now(); + + public Entry(E cachedObject) { + this.cachedObject = cachedObject; + } + + public E getCachedObject() { + return cachedObject; + } + + public boolean isExpired() { + return createdAtTimestamp.plus(entryLifetime).isBefore(Instant.now()); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCache.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCache.java new file mode 100644 index 00000000000..6221efd7848 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCache.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +/** + * @param + * @param + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface ResultCache extends Clearable { + + T get(K key); + + void put(K key, T cachedObject); + + /** + * Calling this method instructs the cache to return null to all {@link #get(K)} calls and ignore any + * {@link #put(K, T)} calls from the current thread until the cache is cleared. Context: after a write operation on + * a connection (which is assumed to be handled exclusively by a dedicated thread), the local cache must be cleared + * and the global cache bypassed until the connection is returned. 
+ */ + void bypassForCurrentThread(); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCacheConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCacheConfig.java new file mode 100644 index 00000000000..2fa1d9677e4 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCacheConfig.java @@ -0,0 +1,27 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty("rdf4j.spring.resultcache.enabled") +@EnableConfigurationProperties(ResultCacheProperties.class) +public class ResultCacheConfig { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCacheProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCacheProperties.java new file mode 100644 index 00000000000..47bdb934ee7 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCacheProperties.java @@ -0,0 +1,89 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.time.Duration; + +import org.springframework.boot.context.properties.ConfigurationProperties; +import org.springframework.validation.annotation.Validated; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.resultcache") +@Validated +public class ResultCacheProperties { + + private boolean enabled = false; + + /** + * Initial size of each cache * + */ + private int initialSize = 10; + + /** + * Maximum size of each cache * + */ + private int maxSize = 1000; + + /** + * If true, a global result cache is used that is cleared when the application writes to the repository. If false, + * no global result cache is used. + */ + private boolean assumeNoOtherRepositoryClients = false; + + /** + * Max age for cache entries. Specifiy as Duration, e.g. 1H, 10m, etc. 
+ */ + private Duration entryLifetime = Duration.ofHours(1); + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + public int getInitialSize() { + return initialSize; + } + + public void setInitialSize(int initialSize) { + this.initialSize = initialSize; + } + + public int getMaxSize() { + return maxSize; + } + + public void setMaxSize(int maxSize) { + this.maxSize = maxSize; + } + + public boolean isAssumeNoOtherRepositoryClients() { + return assumeNoOtherRepositoryClients; + } + + public void setAssumeNoOtherRepositoryClients(boolean assumeNoOtherRepositoryClients) { + this.assumeNoOtherRepositoryClients = assumeNoOtherRepositoryClients; + } + + public Duration getEntryLifetime() { + return entryLifetime; + } + + public void setEntryLifetime(Duration entryLifetime) { + this.entryLifetime = entryLifetime; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCachingGraphQuery.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCachingGraphQuery.java new file mode 100644 index 00000000000..9ad5eba82ab --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCachingGraphQuery.java @@ -0,0 +1,107 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.lang.invoke.MethodHandles; +import java.lang.ref.WeakReference; + +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.spring.support.query.DelegatingGraphQuery; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ResultCachingGraphQuery extends DelegatingGraphQuery { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private WeakReference> localResultCacheRef; + private final ResultCache globalResultCache; + private final ResultCacheProperties properties; + + public ResultCachingGraphQuery( + GraphQuery delegate, + ResultCache localResultCache, + ResultCache globalResultCache, + ResultCacheProperties properties) { + super(delegate); + this.localResultCacheRef = new WeakReference<>(localResultCache); + this.globalResultCache = globalResultCache; + this.properties = properties; + } + + public void renewLocalResultCache( + ResultCache localGraphQueryResultCache) { + if (logger.isDebugEnabled()) { + ResultCache previousCache = localResultCacheRef.get(); + logger.debug( + "resetting local result cache to {} (was: {})", + localGraphQueryResultCache.hashCode(), + previousCache != null ? 
previousCache.hashCode() : "null"); + } + this.localResultCacheRef = new WeakReference<>(localGraphQueryResultCache); + } + + @Override + public GraphQueryResult evaluate() throws QueryEvaluationException { + BindingSet currentBindings = getDelegate().getBindings(); + // TODO: this might be pretty slow due to the toString() call. Is there a better way to get + // a hash for a query with minmal risk of collision ? + Integer cacheKey = currentBindings.hashCode() + getDelegate().toString().hashCode(); + GraphQueryResult cachedResult; + logger.debug("Checking global result cache"); + if (properties.isAssumeNoOtherRepositoryClients()) { + cachedResult = recreateCachedResultIfPossible(currentBindings, cacheKey, globalResultCache); + if (cachedResult != null) { + return cachedResult; + } + } + logger.debug("Checking local result cache"); + ResultCache localResultCache = localResultCacheRef.get(); + if (localResultCache != null) { + cachedResult = recreateCachedResultIfPossible(currentBindings, cacheKey, localResultCache); + if (cachedResult != null) { + return cachedResult; + } + } + logger.debug("No reusable cached result found, executing query"); + GraphQueryResult delegateResult = getDelegate().evaluate(); + if (delegateResult instanceof ReusableGraphQueryResult) { + throw new IllegalStateException( + "Cannot cache an already cached result! This should not happen, the caching layer seems misconfigured."); + } + ReusableGraphQueryResult cacheableResult = new ReusableGraphQueryResult(delegateResult, currentBindings); + if (localResultCache != null) { + localResultCache.put(cacheKey, cacheableResult); + } + return cacheableResult; + } + + private GraphQueryResult recreateCachedResultIfPossible( + BindingSet currentBindings, + Integer cacheKey, + ResultCache cache) { + ReusableGraphQueryResult result; + result = cache.get(cacheKey); + if (result != null + && result.queryBindingsAreIdentical(currentBindings) + && result.canReuse()) { + logger.debug("Reusing previously calculated result"); + return result.recreateGraphQueryResult(); + } + return null; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCachingTupleQuery.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCachingTupleQuery.java new file mode 100644 index 00000000000..10b6a7f0592 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ResultCachingTupleQuery.java @@ -0,0 +1,120 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.lang.invoke.MethodHandles; +import java.lang.ref.WeakReference; + +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.QueryResults; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.TupleQueryResultHandler; +import org.eclipse.rdf4j.query.TupleQueryResultHandlerException; +import org.eclipse.rdf4j.spring.support.query.DelegatingTupleQuery; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ResultCachingTupleQuery extends DelegatingTupleQuery { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private WeakReference> localResultCacheRef; + private final ResultCache globalResultCache; + private final ResultCacheProperties properties; + + public ResultCachingTupleQuery( + TupleQuery delegate, + ResultCache localResultCache, + ResultCache globalResultCache, + ResultCacheProperties properties) { + super(delegate); + this.localResultCacheRef = new WeakReference<>(localResultCache); + this.globalResultCache = globalResultCache; + this.properties = properties; + } + + public void renewLocalResultCache( + ResultCache localResultCache) { + if (logger.isDebugEnabled()) { + ResultCache previousCache = localResultCacheRef.get(); + logger.debug( + "resetting local result cache to {} (was: {})", + localResultCache.hashCode(), + previousCache != null ? previousCache.hashCode() : "null"); + } + this.localResultCacheRef = new WeakReference<>(localResultCache); + } + + @Override + public TupleQueryResult evaluate() throws QueryEvaluationException { + BindingSet currentBindings = getDelegate().getBindings(); + // TODO: this might be pretty slow due to the toString() call. Is there a better way to get + // a hash for a query with minmal risk of collision ? + Integer cacheKey = currentBindings.hashCode() + getDelegate().toString().hashCode(); + logger.debug("Checking global result cache"); + TupleQueryResult cachedResult; + if (properties.isAssumeNoOtherRepositoryClients()) { + cachedResult = recreateCachedResultIfPossible(globalResultCache, currentBindings, cacheKey); + if (cachedResult != null) { + return cachedResult; + } + } + logger.debug("Checking local result cache"); + ResultCache localResultCache = localResultCacheRef.get(); + if (localResultCache != null) { + cachedResult = recreateCachedResultIfPossible(localResultCache, currentBindings, cacheKey); + if (cachedResult != null) { + return cachedResult; + } + } + logger.debug("No reusable cached result found, executing query"); + TupleQueryResult delegateResult = getDelegate().evaluate(); + if (delegateResult instanceof ReusableTupleQueryResult) { + throw new IllegalStateException( + "Cannot cache an already cached result! 
This should not happen, the caching layer seems misconfigured."); + } + ReusableTupleQueryResult cacheableResult = new ReusableTupleQueryResult(delegateResult, currentBindings); + if (localResultCache != null) { + localResultCache.put(cacheKey, cacheableResult); + } + if (properties.isAssumeNoOtherRepositoryClients()) { + globalResultCache.put(cacheKey, cacheableResult); + } + return cacheableResult; + } + + private TupleQueryResult recreateCachedResultIfPossible( + ResultCache cache, + BindingSet currentBindings, + Integer cacheKey) { + ReusableTupleQueryResult result = cache.get(cacheKey); + if (result != null + && result.queryBindingsAreIdentical(currentBindings) + && result.canReuse()) { + logger.debug("Reusing cached result"); + return result.recreateTupleQueryResult(); + } + return null; + } + + @Override + public void evaluate(TupleQueryResultHandler handler) + throws QueryEvaluationException, TupleQueryResultHandlerException { + TupleQueryResult queryResult = evaluate(); + QueryResults.report(queryResult, handler); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ReusableGraphQueryResult.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ReusableGraphQueryResult.java new file mode 100644 index 00000000000..cbad21928ea --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ReusableGraphQueryResult.java @@ -0,0 +1,204 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import static org.eclipse.rdf4j.spring.resultcache.ThrowableRecorder.recordingThrowable; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import java.util.stream.Stream; + +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.spring.support.query.DelegatingIterator; + +/** + * Wrapper for a TupleQueryResult, allowing the result to be replayed. Not thread-safe. The wrapper copies the result + * data, consuming the original result fully on close(). + * + *

+ * Example: + * + *

+ * GraphQueryResult result = graphQuery.evaluate();
+ * ReusableGraphQueryResult reusable = new ReusableGraphQueryResult(result, graphQuery.getBindings());
+ * while (reusable.hasNext()) {
+ * 	reusable.next();
+ * }
+ * reusable.close();
+ * GraphQueryResult cached = reusable.recreateGraphQueryResult();
+ * while (cached.hasNext()) {
+ * 	cached.next();
+ * }
+ * cached.close();
+ *
+ * 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ReusableGraphQueryResult implements GraphQueryResult, ThrowableRecorder { + private GraphQueryResult originalResult; + private final List statements; + private final AtomicBoolean recording = new AtomicBoolean(true); + private final AtomicBoolean exceptionDuringRecording = new AtomicBoolean(false); + private final BindingSet queryBindings; + private Map namespaces; + + public ReusableGraphQueryResult(GraphQueryResult result, BindingSet queryBindings) { + this.originalResult = result; + this.queryBindings = queryBindings; + this.statements = new LinkedList<>(); + this.recording.set(true); + this.exceptionDuringRecording.set(false); + } + + public boolean queryBindingsAreIdentical(BindingSet candidate) { + return queryBindings.equals(candidate); + } + + public boolean canReuse() { + return (!recording.get()) && originalResult == null && (!exceptionDuringRecording.get()); + } + + public CachedGraphQueryResult recreateGraphQueryResult() { + if (recording.get()) { + throw new IllegalStateException("Cannot reuse yet: still recording"); + } + return new CachedGraphQueryResult(this.statements, this.namespaces); + } + + @Override + public void recordThrowable(Throwable t) { + if (recording.get()) { + this.exceptionDuringRecording.set(true); + } + } + + @Override + public Map getNamespaces() throws QueryEvaluationException { + if (recording.get()) { + return this.originalResult.getNamespaces(); + } + throw new IllegalStateException("Not open"); + } + + @Override + public Iterator iterator() { + if (recording.get()) { + return recordingThrowable( + () -> new DelegatingIterator(originalResult.iterator()) { + @Override + public Statement next() { + Statement n = super.next(); + statements.add(n); + return n; + } + }, + this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public void close() throws QueryEvaluationException { + if (recording.get()) { + recordingThrowable( + () -> { + // consume fully if there are more results + while (hasNext()) { + next(); + } + try { + this.namespaces = originalResult.getNamespaces(); + originalResult.close(); + } finally { + originalResult = null; + this.recording.set(false); + } + }, + this); + } else { + throw new IllegalStateException("Cannot close: not open"); + } + } + + @Override + public boolean hasNext() throws QueryEvaluationException { + if (recording.get()) { + return recordingThrowable(() -> originalResult.hasNext(), this); + } else { + return false; + } + } + + @Override + public Statement next() throws QueryEvaluationException { + if (recording.get()) { + Statement n = recordingThrowable(() -> originalResult.next(), this); + statements.add(n); + return n; + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public void remove() throws QueryEvaluationException { + throw new UnsupportedOperationException("Remove is not supported"); + } + + @Override + public Stream stream() { + if (recording.get()) { + return recordingThrowable( + () -> originalResult.stream() + .map( + bindings -> { + statements.add(bindings); + return bindings; + }), + this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public void forEach(Consumer action) { + if (recording.get()) { + recordingThrowable( + () -> originalResult.forEach( + bindings -> { + statements.add(bindings); + action.accept(bindings); + }), + this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public 
Spliterator spliterator() { + return Spliterators.spliteratorUnknownSize(iterator(), Spliterator.ORDERED); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ReusableTupleQueryResult.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ReusableTupleQueryResult.java new file mode 100644 index 00000000000..97281c5ce3d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ReusableTupleQueryResult.java @@ -0,0 +1,209 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import static org.eclipse.rdf4j.spring.resultcache.ThrowableRecorder.recordingThrowable; + +import java.util.Iterator; +import java.util.LinkedList; +import java.util.List; +import java.util.Spliterator; +import java.util.Spliterators; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Consumer; +import java.util.stream.Stream; + +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.spring.support.query.DelegatingIterator; + +/** + * Wrapper for a TupleQueryResult, allowing the result to be replayed. The wrapper copies the result data, consuming the + * original result fully on close(). + * + *

+ * Example: + * + *

+ * TupleQueryResult result = tupleQuery.evaluate();
+ * ReusableTupleQueryResult reusable = new ReusableTupleQueryResult(result, tupleQuery.getBindings());
+ * while (reusable.hasNext()) {
+ * 	reusable.next();
+ * }
+ * reusable.close();
+ * TupleQueryResult cached = reusable.recreateTupleQueryResult();
+ * while (cached.hasNext()) {
+ * 	cached.next();
+ * }
+ * cached.close();
+ *
+ * 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ReusableTupleQueryResult implements TupleQueryResult, ThrowableRecorder { + private TupleQueryResult originalResult; + private final List bindingSets; + private final AtomicBoolean recording = new AtomicBoolean(true); + private final AtomicBoolean exceptionDuringRecording = new AtomicBoolean(false); + private final BindingSet queryBindings; + private List bindingNames; + + public ReusableTupleQueryResult(TupleQueryResult result, BindingSet queryBindings) { + this.originalResult = result; + this.queryBindings = queryBindings; + this.bindingSets = new LinkedList<>(); + this.recording.set(true); + this.exceptionDuringRecording.set(false); + } + + public boolean queryBindingsAreIdentical(BindingSet candidate) { + return queryBindings.equals(candidate); + } + + public boolean canReuse() { + return (!recording.get()) && originalResult == null && (!exceptionDuringRecording.get()); + } + + public CachedTupleQueryResult recreateTupleQueryResult() { + if (recording.get()) { + throw new IllegalStateException("Cannot reuse yet: still recording"); + } + return new CachedTupleQueryResult(this.bindingSets, this.bindingNames); + } + + @Override + public void recordThrowable(Throwable t) { + if (recording.get()) { + this.exceptionDuringRecording.set(true); + } + } + + @Override + public List getBindingNames() throws QueryEvaluationException { + if (recording.get()) { + return recordingThrowable( + originalResult::getBindingNames, + this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public Iterator iterator() { + if (recording.get()) { + return recordingThrowable( + () -> new DelegatingIterator(originalResult.iterator()) { + @Override + public BindingSet next() { + BindingSet n = super.next(); + bindingSets.add(n); + return n; + } + }, + this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public void close() throws QueryEvaluationException { + if (recording.get()) { + recordingThrowable( + () -> { + // consume fully if there are more results + while (hasNext()) { + next(); + } + try { + this.bindingNames = originalResult.getBindingNames(); + originalResult.close(); + } finally { + originalResult = null; + this.recording.set(false); + } + }, + this); + } else { + throw new IllegalStateException("Cannot close: not open"); + } + } + + @Override + public boolean hasNext() throws QueryEvaluationException { + if (recording.get()) { + return recordingThrowable(() -> originalResult.hasNext(), this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public BindingSet next() throws QueryEvaluationException { + if (recording.get()) { + BindingSet n = recordingThrowable(() -> originalResult.next(), this); + bindingSets.add(n); + return n; + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public void remove() throws QueryEvaluationException { + throw new UnsupportedOperationException("Remove is not supported"); + } + + @Override + public Stream stream() { + if (recording.get()) { + return recordingThrowable( + () -> originalResult.stream() + .map( + bindings -> { + bindingSets.add(bindings); + return bindings; + }), + this); + } else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public void forEach(Consumer action) { + if (recording.get()) { + recordingThrowable( + () -> originalResult.forEach( + bindings -> { + bindingSets.add(bindings); + action.accept(bindings); + }), + this); + } 
else { + throw new IllegalStateException("Not open"); + } + } + + @Override + public Spliterator spliterator() { + if (recording.get()) { + return Spliterators.spliteratorUnknownSize(iterator(), Spliterator.ORDERED); + } else { + throw new IllegalStateException("Not open"); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ThrowableRecorder.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ThrowableRecorder.java new file mode 100644 index 00000000000..10144dcf75b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/ThrowableRecorder.java @@ -0,0 +1,40 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.resultcache; + +import java.util.function.Supplier; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface ThrowableRecorder { + void recordThrowable(Throwable t); + + static T recordingThrowable(Supplier supplier, ThrowableRecorder recorder) { + try { + return supplier.get(); + } catch (Throwable t) { + recorder.recordThrowable(t); + throw t; + } + } + + static void recordingThrowable(Runnable runnable, ThrowableRecorder recorder) { + try { + runnable.run(); + } catch (Throwable t) { + recorder.recordThrowable(t); + throw t; + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/package-info.java new file mode 100644 index 00000000000..331c2db22e1 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/resultcache/package-info.java @@ -0,0 +1,53 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

+ * <h1>Rdf4j-Spring ResultCache</h1>

+ * + * Automatically configures a cache for RDF4J query results via the {@link org.eclipse.rdf4j.spring.RDF4JConfig + * Rdf4JConfig}. + * + *

+ * Enable via rdf4j.spring.resultcache.enabled=true. + * + *

+ * If enabled, the {@link org.eclipse.rdf4j.spring.RDF4JConfig Rdf4JConfig} wraps the + * {@link org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory RepositoryConnectionFactory} in + * a {@link org.eclipse.rdf4j.spring.resultcache.CachingRepositoryConnectionFactory CachingRepositoryConnectionFactory}, + * which wraps {@link org.eclipse.rdf4j.repository.RepositoryConnection RepositoryConnection}s in + * {@link org.eclipse.rdf4j.spring.resultcache.CachingRepositoryConnection CachingRepositoryConnection}s. These return + * {@link org.eclipse.rdf4j.spring.resultcache.ResultCachingGraphQuery ResultCachingGraphQuery} and + * {@link org.eclipse.rdf4j.spring.resultcache.ResultCachingTupleQuery ResultCachingTupleQuery} wrappers when + * instantiating queries. The + * ResultCaching(Tuple|Graph)Query returns a Reusable(Tuple|Graph)QueryResult, which records the + * results as they are read by the client code and keeps them for future use. + * + *
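Client code never constructs these wrappers itself; the caching connection does so internally. Purely as an illustration of the record-then-replay contract described above, the following sketch drives a `ReusableTupleQueryResult` by hand. It assumes an in-memory repository, a parameterless query (hence `EmptyBindingSet`), and that the returned `CachedTupleQueryResult` can be used as a plain `TupleQueryResult`:

```java
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.impl.EmptyBindingSet;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.eclipse.rdf4j.spring.resultcache.ReusableTupleQueryResult;

public class ResultRecordingSketch {
	public static void main(String[] args) {
		Repository repo = new SailRepository(new MemoryStore());
		try (RepositoryConnection con = repo.getConnection()) {
			TupleQueryResult live = con
					.prepareTupleQuery("SELECT ?s ?p ?o WHERE { ?s ?p ?o }")
					.evaluate();
			// Wrap the live result; the query bindings later identify the cache entry.
			ReusableTupleQueryResult recording =
					new ReusableTupleQueryResult(live, EmptyBindingSet.getInstance());
			while (recording.hasNext()) {
				recording.next(); // each binding set is recorded as it is consumed
			}
			recording.close(); // finishes recording and releases the original result
			if (recording.canReuse()) {
				// Replays the recorded binding sets without querying the repository again.
				TupleQueryResult replayed = recording.recreateTupleQueryResult();
				replayed.close();
			}
		}
		repo.shutDown();
	}
}
```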

+ * + * There are two levels of caching: connection-level and global. The connection-level cache is cleared when the + * connection is closed (or returned to the pool, if pooling is enabled). The global cache is cleared whenever data is + * written to the repository by the application. + * + *

+ * Note: global result caching is disabled by default. The reason is that in the general case, we cannot be sure + * that no other application writes to the repository. If you are really sure that your application is the only + * one writing to the repository, or if the repository is read-only, you can enable the global result cache using + * rdf4j.spring.resultcache.assume-no-other-repository-clients=true. + * + *
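As a sketch of how these two switches might be supplied in a Spring Boot application (the class name and the programmatic placement of the properties are illustrative, and the second property must only be set under the single-writer assumption stated above):

```java
import java.util.Map;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class MyApplication {
	public static void main(String[] args) {
		SpringApplication app = new SpringApplication(MyApplication.class);
		// Both property names come from the documentation above; the second one is
		// only safe if this application is the repository's sole writer.
		Map<String, Object> defaults = Map.of(
				"rdf4j.spring.resultcache.enabled", "true",
				"rdf4j.spring.resultcache.assume-no-other-repository-clients", "true");
		app.setDefaultProperties(defaults);
		app.run(args);
	}
}
```

The same values could equally live in `application.properties` or `application.yml`; programmatic defaults are used here only to keep the example self-contained.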

+ * For More information on configuration, see {@link org.eclipse.rdf4j.spring.resultcache.ResultCacheProperties + * ResultCacheProperties} + * + * @since 4.0.0 + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring.resultcache; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/ConfigurationException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/ConfigurationException.java new file mode 100644 index 00000000000..a000e05765c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/ConfigurationException.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import org.eclipse.rdf4j.spring.dao.exception.RDF4JSpringException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ConfigurationException extends RDF4JSpringException { + public ConfigurationException() { + } + + public ConfigurationException(String message) { + super(message); + } + + public ConfigurationException(String message, Throwable cause) { + super(message, cause); + } + + public ConfigurationException(Throwable cause) { + super(cause); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DataInserter.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DataInserter.java new file mode 100644 index 00000000000..ea86bdae7bf --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DataInserter.java @@ -0,0 +1,61 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import java.lang.invoke.MethodHandles; +import java.util.Objects; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.spring.dao.exception.RDF4JSpringException; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.Resource; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +@Component +public class DataInserter { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + @Autowired + RepositoryConnectionFactory connectionFactory; + + @Transactional(propagation = Propagation.REQUIRED) + public void insertData(Resource dataFile) { + Objects.requireNonNull(dataFile); + logger.debug("Loading data from {}...", dataFile); + try { + RepositoryConnection con = connectionFactory.getConnection(); + RDFFormat fmt = Rio.getParserFormatForFileName(dataFile.getFilename()) + .orElseThrow( + () -> new IllegalArgumentException( + "Failed to determine file format of input file " + + dataFile)); + con.add(dataFile.getInputStream(), "", fmt); + } catch (Exception e) { + throw new RDF4JSpringException("Unable to load test data", e); + } + logger.debug("\tdone loading data"); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DefaultUUIDSource.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DefaultUUIDSource.java new file mode 100644 index 00000000000..7261cd7102e --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DefaultUUIDSource.java @@ -0,0 +1,27 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import java.util.UUID; + +import org.eclipse.rdf4j.model.IRI; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class DefaultUUIDSource implements UUIDSource { + @Override + public IRI nextUUID() { + return toURNUUID(UUID.randomUUID().toString()); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DirectOperationInstantiator.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DirectOperationInstantiator.java new file mode 100644 index 00000000000..fb9ad7200d0 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/DirectOperationInstantiator.java @@ -0,0 +1,79 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import java.lang.invoke.MethodHandles; +import java.util.function.Supplier; + +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class DirectOperationInstantiator implements OperationInstantiator { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public TupleQuery getTupleQuery(RepositoryConnection con, String queryString) { + if (logger.isDebugEnabled()) { + logger.debug("new tupleQuery:\n\n{}\n", queryString); + } + return con.prepareTupleQuery(queryString); + } + + public Update getUpdate(RepositoryConnection con, String updateString) { + if (logger.isDebugEnabled()) { + logger.debug("new update:\n\n{}\n", updateString); + } + return con.prepareUpdate(updateString); + } + + public GraphQuery getGraphQuery(RepositoryConnection con, String graphQuery) { + if (logger.isDebugEnabled()) { + logger.debug("new graphQuery:\n\n{}\n", graphQuery); + } + return con.prepareGraphQuery(graphQuery); + } + + @Override + public TupleQuery getTupleQuery( + RepositoryConnection con, + Class owner, + String operationName, + Supplier tupleQueryStringSupplier) { + return getTupleQuery(con, tupleQueryStringSupplier.get()); + } + + @Override + public Update getUpdate( + RepositoryConnection con, + Class owner, + String operationName, + Supplier updateStringSupplier) { + return getUpdate(con, updateStringSupplier.get()); + } + + @Override + public GraphQuery getGraphQuery( + RepositoryConnection con, + Class owner, + String operationName, + Supplier graphQueryStringSupplier) { + return getGraphQuery(con, graphQueryStringSupplier.get()); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/OperationInstantiator.java 
b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/OperationInstantiator.java new file mode 100644 index 00000000000..583623c8b4d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/OperationInstantiator.java @@ -0,0 +1,50 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import java.util.function.Supplier; + +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.RepositoryConnection; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface OperationInstantiator { + + TupleQuery getTupleQuery(RepositoryConnection con, String queryString); + + TupleQuery getTupleQuery( + RepositoryConnection con, + Class owner, + String operationName, + Supplier queryStringSupplier); + + Update getUpdate(RepositoryConnection con, String updateString); + + Update getUpdate( + RepositoryConnection con, + Class owner, + String operationName, + Supplier updateStringSupplier); + + GraphQuery getGraphQuery(RepositoryConnection con, String graphQuery); + + GraphQuery getGraphQuery( + RepositoryConnection con, + Class owner, + String operationName, + Supplier graphQuerySupplier); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/RDF4JTemplate.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/RDF4JTemplate.java new file mode 100644 index 00000000000..0dbac366732 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/RDF4JTemplate.java @@ -0,0 +1,404 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import static org.eclipse.rdf4j.spring.util.TypeMappingUtils.toIri; + +import java.io.IOException; +import java.io.StringWriter; +import java.lang.invoke.MethodHandles; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.function.Consumer; +import java.util.function.Function; +import java.util.function.Supplier; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.sail.shacl.ShaclSailValidationException; +import org.eclipse.rdf4j.sparqlbuilder.constraint.Expressions; +import org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.PropertyPath; +import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; +import org.eclipse.rdf4j.sparqlbuilder.core.query.ModifyQuery; +import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries; +import org.eclipse.rdf4j.sparqlbuilder.graphpattern.TriplePattern; +import org.eclipse.rdf4j.sparqlbuilder.rdf.RdfValue; +import org.eclipse.rdf4j.spring.dao.exception.RDF4JDaoException; +import org.eclipse.rdf4j.spring.dao.support.UpdateWithModelBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.GraphQueryEvaluationBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.TupleQueryEvaluationBuilder; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.UpdateExecutionBuilder; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.util.TypeMappingUtils; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.core.io.ResourceLoader; + +/** + * @author Florian Kleedorfer + * @author Gabriel Pickl + * @since 4.0.0 + */ +@Experimental +public class RDF4JTemplate { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private final RepositoryConnectionFactory repositoryConnectionFactory; + private final OperationInstantiator operationInstantiator; + private final ResourceLoader resourceLoader; + private final UUIDSource uuidSource; + + public RDF4JTemplate( + @Autowired RepositoryConnectionFactory repositoryConnectionFactory, + @Autowired OperationInstantiator operationInstantiator, + @Autowired ResourceLoader resourceLoader, + @Autowired(required = false) UUIDSource uuidSource) { + this.repositoryConnectionFactory = repositoryConnectionFactory; + this.operationInstantiator = operationInstantiator; + this.resourceLoader = resourceLoader; + if (uuidSource == null) { + this.uuidSource = new DefaultUUIDSource(); + } else { + this.uuidSource = uuidSource; + } + } + + public void consumeConnection(final Consumer fun) { + RepositoryConnection con = getRepositoryConnection(); + if (logger.isDebugEnabled()) { + logger.debug( + "using 
connection {} of type {}", + con.hashCode(), + con.getClass().getSimpleName()); + } + try { + fun.accept(con); + } catch (Exception e) { + logIfShaclValidationFailure(e); + throw e; + } + } + + public T applyToConnection(final Function fun) { + RepositoryConnection con = getRepositoryConnection(); + if (logger.isDebugEnabled()) { + logger.debug( + "using connection {} of type {}", + con.hashCode(), + con.getClass().getSimpleName()); + } + try { + return fun.apply(con); + } catch (Exception e) { + logIfShaclValidationFailure(e); + throw e; + } + } + + /** + * Bypassing any caches, generates a new Update from the specified SPARQL string and returns a Builder for its + * execution. Should be avoided in favor of one of the methods that apply caching unless the update is not reusable. + */ + public UpdateExecutionBuilder update(String updateString) { + return applyToConnection( + con -> new UpdateExecutionBuilder( + operationInstantiator.getUpdate(con, updateString), this)); + } + + /** + * Uses a cached {@link Update} if one is available under the specified operationName + * for the {@link RepositoryConnection} that is used, otherwise the query string is obtained from the + * specified supplier, a new Update is instantiated and cached for future calls to this method. + *
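To make the owner/operation-name caching concrete, a DAO-style caller might look like the sketch below. `ArtistDao`, the SPARQL string, and the classpath resource name are invented for illustration; only operations that appear in this patch (`update(Class, String, Supplier)`, `updateFromResource`, and the returned builder's `withBinding`/`execute`) are used:

```java
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.util.Values;
import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder;
import org.eclipse.rdf4j.spring.support.RDF4JTemplate;

public class ArtistDao {
	private final RDF4JTemplate rdf4jTemplate;

	public ArtistDao(RDF4JTemplate rdf4jTemplate) {
		this.rdf4jTemplate = rdf4jTemplate;
	}

	public void addLabel(IRI artist, String label) {
		// The supplier is only invoked the first time this owner/operation pair is
		// seen on the underlying connection; afterwards the cached Update is reused.
		rdf4jTemplate
				.update(ArtistDao.class, "addLabel",
						() -> "INSERT { ?artist <http://www.w3.org/2000/01/rdf-schema#label> ?label } WHERE {}")
				.withBinding(SparqlBuilder.var("artist"), artist)
				.withBinding(SparqlBuilder.var("label"), Values.literal(label))
				.execute();
	}

	public void deleteAllLabels() {
		// Reads the SPARQL from a classpath resource; the resource name doubles as
		// the operation name for caching.
		rdf4jTemplate
				.updateFromResource(ArtistDao.class, "classpath:sparql/delete-all-labels.rq")
				.execute();
	}
}
```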

+ * Note: this call is equivalent to {@link #update(String)} if operation caching is disabled. + * + * @param owner the class of the client requesting the update, used to generate a cache key in + * combination with the operation name + * @param operationName name of the operation that, within the scope of the client, identifies the update + * @param updateStringSupplier supplies the sparql of the update if needed + */ + public UpdateExecutionBuilder update( + Class owner, String operationName, Supplier updateStringSupplier) { + return applyToConnection( + con -> new UpdateExecutionBuilder( + operationInstantiator.getUpdate( + con, owner, operationName, updateStringSupplier), + this)); + } + + /** + * Reads the update from the specified resource and provides it through a {@link Supplier } in + * {@link #update(Class, String, Supplier)}, using the resourceName + * as the operationName. + */ + public UpdateExecutionBuilder updateFromResource(Class owner, String resourceName) { + return update( + owner, + resourceName, + () -> getStringSupplierFromResourceContent(resourceName).get()); + } + + /** + * Uses the provided {@link NamedSparqlSupplier} for calling {@link #update(Class, String, Supplier)}. + */ + public UpdateExecutionBuilder update(Class owner, NamedSparqlSupplier namedSparqlSupplier) { + return update( + owner, namedSparqlSupplier.getName(), namedSparqlSupplier.getSparqlSupplier()); + } + + public UpdateExecutionBuilder updateWithoutCachingStatement(String updateString) { + return applyToConnection( + con -> new UpdateExecutionBuilder(con.prepareUpdate(updateString), this)); + } + + public UpdateWithModelBuilder updateWithBuilder() { + return new UpdateWithModelBuilder(getRepositoryConnection()); + } + + /** + * Bypassing any caches, generates a new TupleQuery from the specified SPARQL string and returns a Builder for its + * evaluation. Should be avoided in favor of one of the methods that apply caching unless the query is not reusable. + */ + public TupleQueryEvaluationBuilder tupleQuery(String queryString) { + return new TupleQueryEvaluationBuilder( + applyToConnection(con -> operationInstantiator.getTupleQuery(con, queryString)), + this); + } + + /** + * Uses a cached {@link TupleQuery} if one is available under the specified operationName + * for the {@link RepositoryConnection} that is used, otherwise the query string is obtained from the + * specified supplier, a new TupleQuery is instantiated and cached for future calls to this method. + */ + public TupleQueryEvaluationBuilder tupleQuery( + Class owner, String operationName, Supplier queryStringSupplier) { + return new TupleQueryEvaluationBuilder( + applyToConnection( + con -> operationInstantiator.getTupleQuery( + con, owner, operationName, queryStringSupplier)), + this); + } + + /** + * Reads the query from the specified resource and provides it through a {@link Supplier } in + * {@link #tupleQuery(Class, String, Supplier)}, using the + * resourceName as the operationName. + */ + public TupleQueryEvaluationBuilder tupleQueryFromResource(Class owner, String resourceName) { + return tupleQuery( + owner, + resourceName, + () -> getStringSupplierFromResourceContent(resourceName).get()); + } + + /** + * Uses the provided {@link NamedSparqlSupplier} for calling {@link #tupleQuery(Class, String, Supplier)}. 
+ */ + public TupleQueryEvaluationBuilder tupleQuery( + Class owner, NamedSparqlSupplier namedSparqlSupplier) { + return tupleQuery( + owner, namedSparqlSupplier.getName(), namedSparqlSupplier.getSparqlSupplier()); + } + + /** + * Bypassing any caches, generates a new GraphQuery from the specified SPARQL string and returns a Builder for its + * evaluation. Should be avoided in favor of one of the methods that apply caching unless the query is not reusable. + */ + public GraphQueryEvaluationBuilder graphQuery(String graphQueryString) { + return new GraphQueryEvaluationBuilder( + applyToConnection( + con -> operationInstantiator.getGraphQuery(con, graphQueryString)), + this); + } + + /** + * Uses a cached {@link GraphQuery} if one is available under the specified operationName + * for the {@link RepositoryConnection} that is used, otherwise the query string is obtained from the + * specified supplier, a new GraphQuery is instantiated and cached for future calls to this method. + */ + public GraphQueryEvaluationBuilder graphQuery( + Class owner, String operationName, Supplier queryStringSupplier) { + return new GraphQueryEvaluationBuilder( + applyToConnection( + con -> operationInstantiator.getGraphQuery( + con, owner, operationName, queryStringSupplier)), + this); + } + + /** + * Reads the query from the specified resource and provides it through a {@link Supplier } in + * {@link #graphQuery(Class, String, Supplier)}, using the + * resourceName as the operationName. + */ + public GraphQueryEvaluationBuilder graphQueryFromResource(Class owner, String resourceName) { + return graphQuery( + owner, + resourceName, + () -> getStringSupplierFromResourceContent(resourceName).get()); + } + + /** + * Uses the provided {@link NamedSparqlSupplier} for calling {@link #graphQuery(Class, String, Supplier)}. + */ + public GraphQueryEvaluationBuilder graphQuery( + Class owner, NamedSparqlSupplier namedSparqlSupplier) { + return graphQuery( + owner, namedSparqlSupplier.getName(), namedSparqlSupplier.getSparqlSupplier()); + } + + public void deleteTriplesWithSubject(IRI id) { + consumeConnection( + con -> { + con.remove(id, null, null); + }); + } + + /** + * Deletes the specified resource: all triples are deleted in which id is the subject or the object. + * + * @param id + */ + public void delete(IRI id) { + consumeConnection( + con -> { + con.remove(id, null, null); + con.remove((Resource) null, null, id); + }); + } + + /** + * Deletes the specified resource and all resources R reached via any of the specified property paths. + *

+ * Deletion means that all triples are removed in which start or any resource in R are the + * subject or the object. + * + * @param start the initial resource to be deleted + * @param propertyPaths paths by which to reach more resources to be deleted. + */ + public void delete(IRI start, List propertyPaths) { + List targets = new ArrayList<>(); + int i = 0; + Variable sp = SparqlBuilder.var("sp"); + Variable so = SparqlBuilder.var("so"); + Variable is = SparqlBuilder.var("is"); + Variable ip = SparqlBuilder.var("ip"); + ModifyQuery q = Queries.MODIFY() + .delete(toIri(start).has(sp, so), is.has(ip, start)) + .where(toIri(start).has(sp, so).optional(), is.has(ip, start).optional()); + for (PropertyPath p : propertyPaths) { + i++; + Variable target = SparqlBuilder.var("target_" + i); + Variable p1 = SparqlBuilder.var("p1_" + i); + Variable o1 = SparqlBuilder.var("o2_" + i); + Variable p2 = SparqlBuilder.var("p2_" + i); + Variable s2 = SparqlBuilder.var("s2_" + i); + q.delete(target.has(p1, o1), s2.has(p2, target)) + .where(toIri(start).has(p, target).optional(), target.has(p1, o1).optional(), + s2.has(p2, target).optional()); + } + update(q.getQueryString()).execute(); + } + + public void associate( + IRI fromResource, + IRI property, + Collection toResources, + boolean deleteOtherOutgoing, + boolean deleteOtherIcoming) { + Variable from = SparqlBuilder.var("fromResource"); + Variable to = SparqlBuilder.var("toResource"); + if (deleteOtherOutgoing) { + String query = Queries.MODIFY() + .delete(from.has(property, to)) + .where(from.has(property, to)) + .getQueryString(); + update(query).withBinding(from, fromResource).execute(); + } + if (deleteOtherIcoming) { + String query = Queries.MODIFY() + .delete(from.has(property, to)) + .where( + from.has(property, to) + .filter( + Expressions.in( + to, + toResources.stream() + .map(TypeMappingUtils::toIri) + .collect(Collectors.toList()) + .toArray( + new RdfValue[toResources + .size()])))) + .getQueryString(); + update(query).execute(); + } + String query = Queries.INSERT_DATA( + toResources.stream() + .map(e -> toIri(fromResource).has(property, e)) + .collect(Collectors.toList()) + .toArray(new TriplePattern[toResources.size()])) + .getQueryString(); + updateWithoutCachingStatement(query).execute(); + } + + /** + * Returns a {@link Supplier } that returns the String content of the specified resource (as obtained by a + * {@link ResourceLoader}). The resource's content is read once when this method is called (revealing any problem + * reading the resource early on. 
+ */ + public Supplier getStringSupplierFromResourceContent(String resourceName) { + Objects.requireNonNull(resourceName); + try { + org.springframework.core.io.Resource res = resourceLoader.getResource(resourceName); + String contentAsString = new String(res.getInputStream().readAllBytes()); + return () -> contentAsString; + } catch (IOException e) { + throw new RDF4JDaoException( + String.format("Cannot read String from resource %s", resourceName)); + } + } + + private RepositoryConnection getRepositoryConnection() { + return repositoryConnectionFactory.getConnection(); + } + + private void logIfShaclValidationFailure(Throwable t) { + Throwable cause = t.getCause(); + if (cause instanceof ShaclSailValidationException) { + logger.error("SHACL Validation failed!"); + Model report = ((ShaclSailValidationException) cause).validationReportAsModel(); + StringWriter out = new StringWriter(); + Rio.write(report, out, RDFFormat.TURTLE); + logger.error("Validation report:\n{}", out.toString()); + } + } + + /** + * Returns a UUID IRI (schema: 'urn:uuid'). Actual implementation depends on the {@link #uuidSource} that has been + * configured. See {@link UUIDSource} and {@link org.eclipse.rdf4j.spring.uuidsource} for details. + */ + public IRI getNewUUID() { + return uuidSource.nextUUID(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/UUIDSource.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/UUIDSource.java new file mode 100644 index 00000000000..db79782380a --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/UUIDSource.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; + +/** + * Interface for making different approaches of obtaining new UUIDs pluggable into the {@link RDF4JTemplate + * Rdf4JTemplate}. The {@link org.eclipse.rdf4j.spring.RDF4JConfig Rdf4JConfig}. + * + *

+ * For more information, see {@link org.eclipse.rdf4j.spring.uuidsource}. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface UUIDSource { + IRI nextUUID(); + + default IRI toURNUUID(String uuid) { + return SimpleValueFactory.getInstance().createIRI("urn:uuid:", uuid); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/DelegatingRepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/DelegatingRepositoryConnectionFactory.java new file mode 100644 index 00000000000..27e34b70bba --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/DelegatingRepositoryConnectionFactory.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.connectionfactory; + +import org.eclipse.rdf4j.repository.RepositoryConnection; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public abstract class DelegatingRepositoryConnectionFactory implements RepositoryConnectionFactory { + private final RepositoryConnectionFactory delegate; + + public DelegatingRepositoryConnectionFactory(RepositoryConnectionFactory delegate) { + this.delegate = delegate; + } + + @Override + public RepositoryConnection getConnection() { + return delegate.getConnection(); + } + + protected RepositoryConnectionFactory getDelegate() { + return delegate; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/DirectRepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/DirectRepositoryConnectionFactory.java new file mode 100644 index 00000000000..adf832c99d6 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/DirectRepositoryConnectionFactory.java @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.connectionfactory; + +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class DirectRepositoryConnectionFactory implements RepositoryConnectionFactory { + private final Repository repository; + + public DirectRepositoryConnectionFactory(Repository repository) { + this.repository = repository; + } + + @Override + public RepositoryConnection getConnection() { + return repository.getConnection(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/RepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/RepositoryConnectionFactory.java new file mode 100644 index 00000000000..3c1b8dd0641 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/connectionfactory/RepositoryConnectionFactory.java @@ -0,0 +1,22 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.connectionfactory; + +import org.eclipse.rdf4j.repository.RepositoryConnection; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public interface RepositoryConnectionFactory { + RepositoryConnection getConnection(); +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingGraphQuery.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingGraphQuery.java new file mode 100644 index 00000000000..57f0ce6b6a8 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingGraphQuery.java @@ -0,0 +1,122 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.query; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.explanation.Explanation; +import org.eclipse.rdf4j.rio.RDFHandler; +import org.eclipse.rdf4j.rio.RDFHandlerException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public abstract class DelegatingGraphQuery implements GraphQuery { + private final GraphQuery delegate; + + public DelegatingGraphQuery(GraphQuery delegate) { + this.delegate = delegate; + } + + protected GraphQuery getDelegate() { + return delegate; + } + + @Override + public GraphQueryResult evaluate() throws QueryEvaluationException { + return delegate.evaluate(); + } + + @Override + public void evaluate(RDFHandler handler) throws QueryEvaluationException, RDFHandlerException { + delegate.evaluate(handler); + } + + @Override + @Deprecated + public void setMaxQueryTime(int maxQueryTime) { + delegate.setMaxQueryTime(maxQueryTime); + } + + @Override + @Deprecated + public int getMaxQueryTime() { + return delegate.getMaxQueryTime(); + } + + @Override + @Experimental + public Explanation explain(Explanation.Level level) { + return delegate.explain(level); + } + + @Override + public void setBinding(String name, Value value) { + delegate.setBinding(name, value); + } + + @Override + public void removeBinding(String name) { + delegate.removeBinding(name); + } + + @Override + public void clearBindings() { + delegate.clearBindings(); + } + + @Override + public BindingSet getBindings() { + return delegate.getBindings(); + } + + @Override + public void setDataset(Dataset dataset) { + delegate.setDataset(dataset); + } + + @Override + public Dataset getDataset() { + return delegate.getDataset(); + } + + @Override + public void setIncludeInferred(boolean includeInferred) { + delegate.setIncludeInferred(includeInferred); + } + + @Override + public boolean getIncludeInferred() { + return delegate.getIncludeInferred(); + } + + @Override + public void setMaxExecutionTime(int maxExecutionTimeSeconds) { + delegate.setMaxExecutionTime(maxExecutionTimeSeconds); + } + + @Override + public int getMaxExecutionTime() { + return delegate.getMaxExecutionTime(); + } + + @Override + public String toString() { + return delegate.toString(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingIterator.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingIterator.java new file mode 100644 index 00000000000..ce2b7de8c32 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingIterator.java @@ -0,0 +1,48 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.query; + +import java.util.Iterator; +import java.util.function.Consumer; + +/** + * @param + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class DelegatingIterator implements Iterator { + private final Iterator delegate; + + public DelegatingIterator(Iterator delegate) { + this.delegate = delegate; + } + + @Override + public boolean hasNext() { + return delegate.hasNext(); + } + + @Override + public T next() { + return delegate.next(); + } + + @Override + public void remove() { + delegate.remove(); + } + + @Override + public void forEachRemaining(Consumer action) { + delegate.forEachRemaining(action); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingTupleQuery.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingTupleQuery.java new file mode 100644 index 00000000000..876f8ec0668 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingTupleQuery.java @@ -0,0 +1,123 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.query; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.TupleQueryResultHandler; +import org.eclipse.rdf4j.query.TupleQueryResultHandlerException; +import org.eclipse.rdf4j.query.explanation.Explanation; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public abstract class DelegatingTupleQuery implements TupleQuery { + private final TupleQuery delegate; + + public DelegatingTupleQuery(TupleQuery delegate) { + this.delegate = delegate; + } + + protected TupleQuery getDelegate() { + return delegate; + } + + @Override + public TupleQueryResult evaluate() throws QueryEvaluationException { + return delegate.evaluate(); + } + + @Override + public void evaluate(TupleQueryResultHandler handler) + throws QueryEvaluationException, TupleQueryResultHandlerException { + delegate.evaluate(handler); + } + + @Override + @Deprecated + public void setMaxQueryTime(int maxQueryTime) { + delegate.setMaxQueryTime(maxQueryTime); + } + + @Override + @Deprecated + public int getMaxQueryTime() { + return delegate.getMaxQueryTime(); + } + + @Override + @Experimental + public Explanation explain(Explanation.Level level) { + return delegate.explain(level); + } + + @Override + public void setBinding(String name, Value value) { + delegate.setBinding(name, value); + } + + @Override + public void removeBinding(String name) { + delegate.removeBinding(name); + } + + @Override + public void clearBindings() { + 
delegate.clearBindings(); + } + + @Override + public BindingSet getBindings() { + return delegate.getBindings(); + } + + @Override + public void setDataset(Dataset dataset) { + delegate.setDataset(dataset); + } + + @Override + public Dataset getDataset() { + return delegate.getDataset(); + } + + @Override + public void setIncludeInferred(boolean includeInferred) { + delegate.setIncludeInferred(includeInferred); + } + + @Override + public boolean getIncludeInferred() { + return delegate.getIncludeInferred(); + } + + @Override + public void setMaxExecutionTime(int maxExecutionTimeSeconds) { + delegate.setMaxExecutionTime(maxExecutionTimeSeconds); + } + + @Override + public int getMaxExecutionTime() { + return delegate.getMaxExecutionTime(); + } + + @Override + public String toString() { + return delegate.toString(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingUpdate.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingUpdate.java new file mode 100644 index 00000000000..f447f90ba95 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/support/query/DelegatingUpdate.java @@ -0,0 +1,89 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support.query; + +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.query.UpdateExecutionException; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public abstract class DelegatingUpdate implements Update { + private final Update delegate; + + public DelegatingUpdate(Update delegate) { + this.delegate = delegate; + } + + protected Update getDelegate() { + return delegate; + } + + @Override + public void execute() throws UpdateExecutionException { + delegate.execute(); + } + + @Override + public void setBinding(String name, Value value) { + delegate.setBinding(name, value); + } + + @Override + public void removeBinding(String name) { + delegate.removeBinding(name); + } + + @Override + public void clearBindings() { + delegate.clearBindings(); + } + + @Override + public BindingSet getBindings() { + return delegate.getBindings(); + } + + @Override + public void setDataset(Dataset dataset) { + delegate.setDataset(dataset); + } + + @Override + public Dataset getDataset() { + return delegate.getDataset(); + } + + @Override + public void setIncludeInferred(boolean includeInferred) { + delegate.setIncludeInferred(includeInferred); + } + + @Override + public boolean getIncludeInferred() { + return delegate.getIncludeInferred(); + } + + @Override + public void setMaxExecutionTime(int maxExecutionTimeSeconds) { + delegate.setMaxExecutionTime(maxExecutionTimeSeconds); + } + + @Override + public int getMaxExecutionTime() { + return delegate.getMaxExecutionTime(); + } +} diff --git 
a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/test/RDF4JTestConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/test/RDF4JTestConfig.java new file mode 100644 index 00000000000..08bb395c4dd --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/test/RDF4JTestConfig.java @@ -0,0 +1,65 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.test; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.spring.RDF4JConfig; +import org.eclipse.rdf4j.spring.operationcache.OperationCacheConfig; +import org.eclipse.rdf4j.spring.operationlog.OperationLogConfig; +import org.eclipse.rdf4j.spring.operationlog.log.jmx.OperationLogJmxConfig; +import org.eclipse.rdf4j.spring.pool.PoolConfig; +import org.eclipse.rdf4j.spring.repository.inmemory.InMemoryRepositoryConfig; +import org.eclipse.rdf4j.spring.repository.remote.RemoteRepositoryConfig; +import org.eclipse.rdf4j.spring.resultcache.ResultCacheConfig; +import org.eclipse.rdf4j.spring.tx.TxConfig; +import org.eclipse.rdf4j.spring.uuidsource.noveltychecking.NoveltyCheckingUUIDSourceConfig; +import org.eclipse.rdf4j.spring.uuidsource.predictable.PredictableUUIDSourceConfig; +import org.eclipse.rdf4j.spring.uuidsource.sequence.UUIDSequenceConfig; +import org.eclipse.rdf4j.spring.uuidsource.simple.SimpleRepositoryUUIDSourceConfig; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Import; + +/** + * Spring configuration for use in unit tests. Imports the configurations of all subsystems that are autoconfigured when + * used outside of tests. Test configurations should import this configuration: + * + *

+ * 	@TestConfiguration
+ * 	@Import(RDF4JTestConfig.class)
+ * 	@ComponentScan(basePackages = "com.example.myapp.*")
+ * 	public class TestConfig {
+ * 		// application-specific configuration
+ * 	}
+ * 
+ * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +@Configuration +@Import({ + RDF4JConfig.class, + InMemoryRepositoryConfig.class, + RemoteRepositoryConfig.class, + PoolConfig.class, + ResultCacheConfig.class, + OperationCacheConfig.class, + OperationLogConfig.class, + OperationLogJmxConfig.class, + TxConfig.class, + UUIDSequenceConfig.class, + NoveltyCheckingUUIDSourceConfig.class, + SimpleRepositoryUUIDSourceConfig.class, + PredictableUUIDSourceConfig.class +}) +public class RDF4JTestConfig { +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/IsolationLevelAdapter.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/IsolationLevelAdapter.java new file mode 100644 index 00000000000..adf3d198e44 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/IsolationLevelAdapter.java @@ -0,0 +1,54 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import org.eclipse.rdf4j.common.transaction.IsolationLevel; +import org.eclipse.rdf4j.common.transaction.IsolationLevels; +import org.eclipse.rdf4j.sail.Sail; +import org.springframework.transaction.InvalidIsolationLevelException; +import org.springframework.transaction.TransactionDefinition; + +/** + * @author ameingast@gmail.com + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class IsolationLevelAdapter { + static IsolationLevel adaptToRdfIsolation(Sail sail, int springIsolation) { + switch (springIsolation) { + case TransactionDefinition.ISOLATION_DEFAULT: + return sail.getDefaultIsolationLevel(); + case TransactionDefinition.ISOLATION_READ_COMMITTED: + return determineIsolationLevel(sail, IsolationLevels.READ_COMMITTED); + case TransactionDefinition.ISOLATION_READ_UNCOMMITTED: + return determineIsolationLevel(sail, IsolationLevels.READ_UNCOMMITTED); + case TransactionDefinition.ISOLATION_REPEATABLE_READ: + throw new InvalidIsolationLevelException( + "Unsupported isolation level for sail: " + sail + ": " + springIsolation); + case TransactionDefinition.ISOLATION_SERIALIZABLE: + return determineIsolationLevel(sail, IsolationLevels.SERIALIZABLE); + default: + throw new InvalidIsolationLevelException( + "Unsupported isolation level for sail: " + sail + ": " + springIsolation); + } + } + + private static IsolationLevel determineIsolationLevel( + Sail sail, IsolationLevel isolationLevel) { + if (sail.getSupportedIsolationLevels().contains(isolationLevel)) { + return isolationLevel; + } else { + throw new InvalidIsolationLevelException( + "Unsupported isolation level for sail: " + sail + ": " + isolationLevel); + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/RDF4JRepositoryTransactionManager.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/RDF4JRepositoryTransactionManager.java new file mode 100644 index 00000000000..84f747bcee9 --- /dev/null +++ 
b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/RDF4JRepositoryTransactionManager.java @@ -0,0 +1,126 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.common.transaction.IsolationLevel; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.Sail; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.transaction.TransactionDefinition; +import org.springframework.transaction.TransactionException; +import org.springframework.transaction.TransactionSystemException; +import org.springframework.transaction.support.AbstractPlatformTransactionManager; +import org.springframework.transaction.support.DefaultTransactionStatus; + +/** + * @author ameingast@gmail.com + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RDF4JRepositoryTransactionManager extends AbstractPlatformTransactionManager { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final TransactionalRepositoryConnectionFactory repositoryConnectionFactory; + + public RDF4JRepositoryTransactionManager( + TransactionalRepositoryConnectionFactory repositoryConnectionFactory) { + this.repositoryConnectionFactory = repositoryConnectionFactory; + } + + @Override + protected Object doGetTransaction() throws TransactionException { + TransactionObject transactionData = this.repositoryConnectionFactory.getTransactionData(); + logger.debug("obtaining transaction data"); + if (transactionData == null) { + logger.debug("creating new transaction"); + transactionData = this.repositoryConnectionFactory.createTransaction(); + } else { + logger.debug("using existing transaction"); + transactionData.setExisting(true); + } + + return transactionData; + } + + @Override + protected boolean isExistingTransaction(Object transaction) throws TransactionException { + return ((TransactionObject) transaction).isExisting(); + } + + @Override + protected void doBegin(Object o, TransactionDefinition transactionDefinition) + throws TransactionException { + logger.debug("beginning transaction"); + TransactionObject data = (TransactionObject) o; + data.setTimeout(transactionDefinition.getTimeout()); + data.setIsolationLevel(transactionDefinition.getIsolationLevel()); + data.setPropagationBehavior(transactionDefinition.getPropagationBehavior()); + data.setReadOnly(transactionDefinition.isReadOnly()); + data.setName(Thread.currentThread().getName() + " " + transactionDefinition.getName()); + setIsolationLevel(data, transactionDefinition); + } + + private void setIsolationLevel( + TransactionObject transactionData, TransactionDefinition transactionDefinition) { + RepositoryConnection repositoryConnection = transactionData.getConnection(); + Repository repository = repositoryConnection.getRepository(); + + if 
(repository instanceof SailRepository) { + Sail sail = ((SailRepository) repository).getSail(); + IsolationLevel isolationLevel = IsolationLevelAdapter.adaptToRdfIsolation( + sail, transactionDefinition.getIsolationLevel()); + repositoryConnection.setIsolationLevel(isolationLevel); + } + } + + @Override + protected void doCommit(DefaultTransactionStatus defaultTransactionStatus) + throws TransactionException { + logger.debug("committting transaction"); + TransactionObject data = (TransactionObject) defaultTransactionStatus.getTransaction(); + try { + this.repositoryConnectionFactory.endTransaction(data.isRollbackOnly()); + } catch (Exception e) { + throw new TransactionSystemException("Error during commit", e); + } + } + + @Override + protected void doRollback(DefaultTransactionStatus defaultTransactionStatus) + throws TransactionException { + logger.debug("rolling back transaction"); + TransactionObject data = (TransactionObject) defaultTransactionStatus.getTransaction(); + try { + this.repositoryConnectionFactory.endTransaction(true); + } catch (Exception e) { + throw new TransactionSystemException("Error during rollback", e); + } + } + + @Override + protected void doSetRollbackOnly(DefaultTransactionStatus status) throws TransactionException { + logger.debug("marking transaction for rollback"); + TransactionObject data = (TransactionObject) status.getTransaction(); + data.setRollbackOnly(true); + } + + @Override + protected void doCleanupAfterCompletion(Object transaction) { + this.repositoryConnectionFactory.closeConnection(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionObject.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionObject.java new file mode 100644 index 00000000000..130ec928fd7 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionObject.java @@ -0,0 +1,137 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
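The transaction manager above is a standard AbstractPlatformTransactionManager, so it can be driven programmatically through Spring's TransactionTemplate. Below is a minimal sketch (not part of the patch), assuming the rdf4j-spring auto-configuration is active and `rdf4j.spring.tx.enabled=true` so that the RDF4JRepositoryTransactionManager bean exists; the service name, property IRI and the use of RDF4JTemplate.applyToConnection are illustrative. Requesting ISOLATION_SERIALIZABLE flows through doBegin into IsolationLevelAdapter.adaptToRdfIsolation, which maps it to IsolationLevels.SERIALIZABLE for SailRepository-backed repositories and rejects REPEATABLE_READ outright.

```java
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.spring.support.RDF4JTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.support.TransactionTemplate;

// Hypothetical service, not part of the patch: programmatic transaction demarcation.
@Service
public class LabelUpdateService {

    private static final ValueFactory VF = SimpleValueFactory.getInstance();
    private static final IRI RDFS_LABEL = VF.createIRI("http://www.w3.org/2000/01/rdf-schema#label");

    private final TransactionTemplate tx;
    private final RDF4JTemplate rdf4JTemplate;

    public LabelUpdateService(PlatformTransactionManager txManager, RDF4JTemplate rdf4JTemplate) {
        // ISOLATION_SERIALIZABLE is adapted to IsolationLevels.SERIALIZABLE by IsolationLevelAdapter.
        this.tx = new TransactionTemplate(txManager);
        this.tx.setIsolationLevel(TransactionDefinition.ISOLATION_SERIALIZABLE);
        this.rdf4JTemplate = rdf4JTemplate;
    }

    public void relabel(IRI subject, String newLabel) {
        tx.executeWithoutResult(status -> rdf4JTemplate.applyToConnection(con -> {
            con.remove(subject, RDFS_LABEL, null); // wildcard object: drop all existing labels
            con.add(subject, RDFS_LABEL, VF.createLiteral(newLabel));
            return null;
        }));
    }
}
```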
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import static org.springframework.transaction.TransactionDefinition.ISOLATION_DEFAULT; +import static org.springframework.transaction.TransactionDefinition.PROPAGATION_REQUIRED; +import static org.springframework.transaction.TransactionDefinition.TIMEOUT_DEFAULT; + +import java.util.function.Function; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.repository.RepositoryConnection; + +/** + * @author ameingast@gmail.com + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +public class TransactionObject { + + private RepositoryConnection connection; + + private boolean existing; + + private String name = ""; + + private boolean rollbackOnly = false; + + private int timeout = TIMEOUT_DEFAULT; + + private int isolationLevel = ISOLATION_DEFAULT; + + private int propagationBehavior = PROPAGATION_REQUIRED; + + private boolean readOnly = false; + + public TransactionObject(RepositoryConnection connection) { + this.connection = connection; + } + + public RepositoryConnection getConnection() { + return connection; + } + + public void wrapConnection(Function wrapper) { + this.connection = wrapper.apply(connection); + } + + public void setExisting(boolean existing) { + this.existing = existing; + } + + public boolean isExisting() { + return existing; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public boolean isRollbackOnly() { + return rollbackOnly; + } + + public void setRollbackOnly(boolean rollbackOnly) { + this.rollbackOnly = rollbackOnly; + } + + public int getTimeout() { + return timeout; + } + + public void setTimeout(int timeout) { + this.timeout = timeout; + } + + public int getIsolationLevel() { + return isolationLevel; + } + + public void setIsolationLevel(int isolationLevel) { + this.isolationLevel = isolationLevel; + } + + public int getPropagationBehavior() { + return propagationBehavior; + } + + public void setPropagationBehavior(int propagationBehavior) { + this.propagationBehavior = propagationBehavior; + } + + public boolean isReadOnly() { + return readOnly; + } + + public void setReadOnly(boolean readOnly) { + this.readOnly = readOnly; + } + + @Override + public String toString() { + return "TransactionData{" + + "connection=" + + connection + + ", existing=" + + existing + + ", name='" + + name + + '\'' + + ", rollbackOnly=" + + rollbackOnly + + ", timeout=" + + timeout + + ", isolationLevel=" + + isolationLevel + + ", propagationBehavior=" + + propagationBehavior + + ", readOnly=" + + readOnly + + '}'; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionalRepositoryConnection.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionalRepositoryConnection.java new file mode 100644 index 00000000000..1f40cb7e0a8 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionalRepositoryConnection.java @@ -0,0 +1,234 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.Reader; +import java.net.URL; + +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.RepositoryResult; +import org.eclipse.rdf4j.repository.base.RepositoryConnectionWrapper; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFParseException; +import org.eclipse.rdf4j.spring.tx.exception.WriteDeniedException; + +/** + * Connection wrapper that throws an exception if a write operation is attempted in a read-only transaction. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TransactionalRepositoryConnection extends RepositoryConnectionWrapper { + + TransactionObject transactionObject = null; + + public TransactionalRepositoryConnection(Repository repository) { + super(repository); + } + + public TransactionalRepositoryConnection(Repository repository, RepositoryConnection delegate) { + super(repository, delegate); + this.transactionObject = transactionObject; + } + + public void setTransactionObject(TransactionObject transactionObject) { + this.transactionObject = transactionObject; + } + + private void throwExceptionIfReadonly() { + if (this.transactionObject.isReadOnly()) { + throw new WriteDeniedException("Cannot write in a read-only transaction!"); + } + } + + @Override + public void add(File file, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(file, baseURI, dataFormat, contexts); + } + + @Override + public void add(InputStream in, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(in, baseURI, dataFormat, contexts); + } + + @Override + public void add(Iterable statements, Resource... contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.add(statements, contexts); + } + + @Override + public void add( + CloseableIteration statementIter, Resource... contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.add(statementIter, contexts); + } + + @Override + public void add(Reader reader, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(reader, baseURI, dataFormat, contexts); + } + + @Override + public void add(Resource subject, IRI predicate, Value object, Resource... 
contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.add(subject, predicate, object, contexts); + } + + @Override + public void add(Statement st, Resource... contexts) throws RepositoryException { + throwExceptionIfReadonly(); + super.add(st, contexts); + } + + @Override + public void add(URL url, String baseURI, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(url, baseURI, dataFormat, contexts); + } + + @Override + public void clear(Resource... contexts) throws RepositoryException { + throwExceptionIfReadonly(); + super.clear(contexts); + } + + @Override + public void remove(Iterable statements, Resource... contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.remove(statements, contexts); + } + + @Override + public void remove( + CloseableIteration statementIter, Resource... contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.remove(statementIter, contexts); + } + + @Override + public void remove(Resource subject, IRI predicate, Value object, Resource... contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.remove(subject, predicate, object, contexts); + } + + @Override + public void remove(Statement st, Resource... contexts) throws RepositoryException { + throwExceptionIfReadonly(); + super.remove(st, contexts); + } + + @Override + public void removeNamespace(String prefix) throws RepositoryException { + throwExceptionIfReadonly(); + super.removeNamespace(prefix); + } + + @Override + public void clearNamespaces() throws RepositoryException { + throwExceptionIfReadonly(); + super.clearNamespaces(); + } + + @Override + public void setNamespace(String prefix, String name) throws RepositoryException { + throwExceptionIfReadonly(); + super.setNamespace(prefix, name); + } + + @Override + public Update prepareUpdate(String update) throws RepositoryException, MalformedQueryException { + throwExceptionIfReadonly(); + return super.prepareUpdate(update); + } + + @Override + public void add(InputStream in, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(in, dataFormat, contexts); + } + + @Override + public void add(Reader reader, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(reader, dataFormat, contexts); + } + + @Override + public void add(URL url, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(url, contexts); + } + + @Override + public void add(URL url, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(url, dataFormat, contexts); + } + + @Override + public void add(File file, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(file, contexts); + } + + @Override + public void add(File file, RDFFormat dataFormat, Resource... contexts) + throws IOException, RDFParseException, RepositoryException { + throwExceptionIfReadonly(); + super.add(file, dataFormat, contexts); + } + + @Override + public void add(RepositoryResult statements, Resource... 
contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.add(statements, contexts); + } + + @Override + public void remove(RepositoryResult statements, Resource... contexts) + throws RepositoryException { + throwExceptionIfReadonly(); + super.remove(statements, contexts); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionalRepositoryConnectionFactory.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionalRepositoryConnectionFactory.java new file mode 100644 index 00000000000..d835901704c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TransactionalRepositoryConnectionFactory.java @@ -0,0 +1,191 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import static org.eclipse.rdf4j.spring.util.RepositoryConnectionWrappingUtils.findWrapper; +import static org.eclipse.rdf4j.spring.util.RepositoryConnectionWrappingUtils.wrapOnce; + +import java.lang.invoke.MethodHandles; + +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.sail.shacl.ShaclSailValidationReportHelper; +import org.eclipse.rdf4j.spring.support.connectionfactory.RepositoryConnectionFactory; +import org.eclipse.rdf4j.spring.tx.exception.CommitException; +import org.eclipse.rdf4j.spring.tx.exception.ConnectionClosedException; +import org.eclipse.rdf4j.spring.tx.exception.NoTransactionException; +import org.eclipse.rdf4j.spring.tx.exception.RDF4JTransactionException; +import org.eclipse.rdf4j.spring.tx.exception.RollbackException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author ameingast@gmail.com + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TransactionalRepositoryConnectionFactory implements RepositoryConnectionFactory { + + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + private final RepositoryConnectionFactory delegateFactory; + + private final ThreadLocal transactionData = new ThreadLocal<>(); + + public TransactionalRepositoryConnectionFactory(RepositoryConnectionFactory delegateFactory) { + this.delegateFactory = delegateFactory; + } + + public TransactionObject getTransactionData() { + return transactionData.get(); + } + + public RepositoryConnection getConnection() { + logger.debug("Trying to obtain connection"); + TransactionObject data = getTransactionData(); + if (data == null) { + throw new NoTransactionException("Cannot obtain connection: no transaction"); + } + RepositoryConnection con = data.getConnection(); + if (con == null) { + throw new RDF4JTransactionException( + "Cannot obtain connection: transaction started but no connection found"); + } + if (!con.isOpen()) { + throw new ConnectionClosedException("Cannot obtain connection: connection closed"); + } + if (data.isReadOnly()) { + logger.debug("transaction is readonly, not starting a database transaction"); + } else { + if (!con.isActive()) { + 
logger.debug( + "connection does not have an active transaction yet, starting transaction"); + con.begin(); + logger.debug("con.begin() called"); + } + } + logger.debug("returning connection"); + return con; + } + + public void closeConnection() { + logger.debug("Trying to close connection"); + RepositoryConnection con = null; + try { + TransactionObject data = getTransactionData(); + if (data == null) { + throw new NoTransactionException("Cannot close connection: no transaction"); + } + con = data.getConnection(); + if (con == null) { + throw new RDF4JTransactionException( + "Cannot close connection: transaction started but no connection found"); + } + if (!con.isOpen()) { + throw new ConnectionClosedException("Cannot close connection: connection closed"); + } + } finally { + try { + if (con != null && con.isActive()) { + logger.warn( + "Encountered active transaction when closing connection - rolling back!"); + con.rollback(); + logger.debug("con.rollback() called"); + } + } catch (Throwable t) { + logger.error("Error rolling back transaction", t); + } + try { + if (con != null) { + con.close(); + logger.debug("con.close() called"); + } + } catch (Throwable t) { + logger.error("Error closing connection", t); + } + this.transactionData.remove(); + logger.debug("Thread-local transaction data removed"); + } + } + + public TransactionObject createTransaction() { + logger.debug("Trying to create new transaction"); + RepositoryConnection delegate = delegateFactory.getConnection(); + RepositoryConnection wrappedCon = wrapOnce( + delegate, + con -> new TransactionalRepositoryConnection(con.getRepository(), con), + TransactionalRepositoryConnection.class); + TransactionObject txObj = new TransactionObject(wrappedCon); + transactionData.set(txObj); + TransactionalRepositoryConnection txCon = findWrapper(wrappedCon, TransactionalRepositoryConnection.class) + .get(); + txCon.setTransactionObject(txObj); + logger.debug("Transaction created"); + return txObj; + } + + public void endTransaction(boolean rollback) { + logger.debug("Trying to end transaction"); + TransactionObject data = getTransactionData(); + if (data == null) { + throw new NoTransactionException("Cannot obtain connection: no transaction"); + } + RepositoryConnection con = data.getConnection(); + if (con == null) { + throw new RDF4JTransactionException( + "Cannot obtain connection: transaction started but no connection found"); + } + if (!con.isOpen()) { + throw new ConnectionClosedException("Cannot obtain connection: connection closed"); + } + if (data.isReadOnly()) { + logger.debug("transaction is readonly"); + if (con.isActive()) { + logger.debug("however, the connection is active - rolling back"); + try { + con.rollback(); + } catch (Exception e) { + throw new RollbackException( + "Cannot rollback changes in readonly transaction: an error occurred", + e); + } + } else { + logger.debug("The connection is inactive, no updates have been attempted."); + } + } else { + if (con.isActive()) { + if (rollback) { + try { + logger.debug("rolling back transaction..."); + con.rollback(); + logger.debug("con.rollback() called"); + } catch (Throwable t) { + throw new RollbackException( + "Cannot rollback transaction: an error occurred", t); + } + } else { + try { + logger.debug("committing transaction..."); + con.commit(); + logger.debug("con.commit() called"); + } catch (Throwable t) { + ShaclSailValidationReportHelper + .getValidationReportAsString(t) + .ifPresent(report -> logger.error( + "SHACL validation failed, cannot commit. 
Validation report:\n{}", report)); + throw new CommitException( + "Cannot commit transaction: an error occurred", t); + } + } + } + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TxConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TxConfig.java new file mode 100644 index 00000000000..a81f22cad27 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TxConfig.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty(prefix = "rdf4j.spring.tx", name = "enabled") +@EnableConfigurationProperties(TxProperties.class) +public class TxConfig { + + @Bean + RDF4JRepositoryTransactionManager getTxManager( + @Autowired TransactionalRepositoryConnectionFactory txConnectionFactory) { + return new RDF4JRepositoryTransactionManager(txConnectionFactory); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TxProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TxProperties.java new file mode 100644 index 00000000000..839f65b0a67 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/TxProperties.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
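endTransaction(...) above wraps commit failures in CommitException (after logging a SHACL validation report when one can be extracted from the cause) and rollback failures in RollbackException, both subclasses of RDF4JTransactionException. The following hypothetical helper (not part of the patch) sketches how calling code might react to that hierarchy around any transactional write:

```java
import org.eclipse.rdf4j.spring.tx.exception.CommitException;
import org.eclipse.rdf4j.spring.tx.exception.RDF4JTransactionException;

// Hypothetical helper, not part of the patch.
public final class TxErrorHandling {

    private TxErrorHandling() {
    }

    /** Runs a transactional write (e.g. a @Transactional service call) and reports whether it committed. */
    public static boolean commitQuietly(Runnable transactionalWrite) {
        try {
            transactionalWrite.run();
            return true;
        } catch (CommitException e) {
            // The factory has already logged the SHACL validation report, if any;
            // the underlying repository exception is preserved as the cause.
            return false;
        } catch (RDF4JTransactionException e) {
            // Rollback and connection problems surface as other subclasses.
            return false;
        }
    }
}
```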
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.tx") +public class TxProperties { + private boolean enabled = false; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/CommitException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/CommitException.java new file mode 100644 index 00000000000..e8ca06e159b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/CommitException.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class CommitException extends RDF4JTransactionException { + public CommitException() { + } + + public CommitException(String message) { + super(message); + } + + public CommitException(String message, Throwable cause) { + super(message, cause); + } + + public CommitException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/ConnectionClosedException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/ConnectionClosedException.java new file mode 100644 index 00000000000..ee13e979916 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/ConnectionClosedException.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
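TxProperties binds `rdf4j.spring.tx.enabled`, and TxConfig only contributes the RDF4JRepositoryTransactionManager bean when that flag is set, so declarative transactions are strictly opt-in. A hedged sketch of a service using them, assuming the rest of the rdf4j-spring auto-configuration (connection factory, RDF4JTemplate) is enabled as well; the service name and IRIs are invented for illustration:

```java
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.spring.support.RDF4JTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

// Hypothetical service, not part of the patch: declarative transaction demarcation.
@Service
public class GalleryService {

    private static final ValueFactory VF = SimpleValueFactory.getInstance();
    private static final IRI EXHIBITED_IN = VF.createIRI("http://example.org/exhibitedIn");

    private final RDF4JTemplate rdf4JTemplate;

    public GalleryService(RDF4JTemplate rdf4JTemplate) {
        this.rdf4JTemplate = rdf4JTemplate;
    }

    /** Both statements are committed together; a runtime exception rolls the whole transaction back. */
    @Transactional
    public void exhibit(IRI paintingA, IRI paintingB, IRI gallery) {
        rdf4JTemplate.applyToConnection(con -> {
            con.add(paintingA, EXHIBITED_IN, gallery);
            con.add(paintingB, EXHIBITED_IN, gallery);
            return null;
        });
    }
}
```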
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ConnectionClosedException extends RDF4JTransactionException { + public ConnectionClosedException() { + } + + public ConnectionClosedException(String message) { + super(message); + } + + public ConnectionClosedException(String message, Throwable cause) { + super(message, cause); + } + + public ConnectionClosedException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/NoTransactionException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/NoTransactionException.java new file mode 100644 index 00000000000..b0023d31e28 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/NoTransactionException.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class NoTransactionException extends RDF4JTransactionException { + public NoTransactionException() { + } + + public NoTransactionException(String message) { + super(message); + } + + public NoTransactionException(String message, Throwable cause) { + super(message, cause); + } + + public NoTransactionException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RDF4JTransactionException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RDF4JTransactionException.java new file mode 100644 index 00000000000..24ae1f0505d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RDF4JTransactionException.java @@ -0,0 +1,37 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +import org.eclipse.rdf4j.spring.dao.exception.RDF4JSpringException; + +/** + * @author ameingast@gmail.com + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RDF4JTransactionException extends RDF4JSpringException { + public RDF4JTransactionException() { + } + + public RDF4JTransactionException(String message) { + super(message); + } + + public RDF4JTransactionException(String message, Throwable cause) { + super(message, cause); + } + + public RDF4JTransactionException(Throwable cause) { + super(cause); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RepositoryConnectionPoolException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RepositoryConnectionPoolException.java new file mode 100644 index 00000000000..b0544f35418 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RepositoryConnectionPoolException.java @@ -0,0 +1,41 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RepositoryConnectionPoolException extends RuntimeException { + public RepositoryConnectionPoolException() { + } + + public RepositoryConnectionPoolException(String message) { + super(message); + } + + public RepositoryConnectionPoolException(String message, Throwable cause) { + super(message, cause); + } + + public RepositoryConnectionPoolException(Throwable cause) { + super(cause); + } + + public RepositoryConnectionPoolException( + String message, + Throwable cause, + boolean enableSuppression, + boolean writableStackTrace) { + super(message, cause, enableSuppression, writableStackTrace); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RollbackException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RollbackException.java new file mode 100644 index 00000000000..5aeffe5fc98 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/RollbackException.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RollbackException extends RDF4JTransactionException { + public RollbackException() { + } + + public RollbackException(String message) { + super(message); + } + + public RollbackException(String message, Throwable cause) { + super(message, cause); + } + + public RollbackException(Throwable cause) { + super(cause); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/TransactionInactiveException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/TransactionInactiveException.java new file mode 100644 index 00000000000..8d46b1749f6 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/TransactionInactiveException.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TransactionInactiveException extends RDF4JTransactionException { + public TransactionInactiveException() { + } + + public TransactionInactiveException(String message) { + super(message); + } + + public TransactionInactiveException(String message, Throwable cause) { + super(message, cause); + } + + public TransactionInactiveException(Throwable cause) { + super(cause); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/WriteDeniedException.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/WriteDeniedException.java new file mode 100644 index 00000000000..b187aa2cb4c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/exception/WriteDeniedException.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
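The WriteDeniedException defined next is what TransactionalRepositoryConnection throws when a mutating operation reaches the connection inside a transaction declared read-only. A sketch of both sides of that contract, assuming transactions are enabled and RDF4JTemplate hands out the wrapped, transaction-bound connection (as the factory above arranges); class and IRI names are illustrative:

```java
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.spring.support.RDF4JTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

// Hypothetical service, not part of the patch: read-only transactions and the write guard.
@Service
public class ReadOnlyAuditService {

    private static final ValueFactory VF = SimpleValueFactory.getInstance();

    private final RDF4JTemplate rdf4JTemplate;

    public ReadOnlyAuditService(RDF4JTemplate rdf4JTemplate) {
        this.rdf4JTemplate = rdf4JTemplate;
    }

    @Transactional(readOnly = true)
    public long countStatements() {
        // Reads are fine; the factory does not even begin a repository transaction for read-only work.
        return rdf4JTemplate.applyToConnection(con -> con.size());
    }

    @Transactional(readOnly = true)
    public void illegalWrite() {
        // Throws WriteDeniedException: the wrapper guards every add/remove/clear/update operation.
        rdf4JTemplate.applyToConnection(con -> {
            con.add(VF.createIRI("urn:example:s"), VF.createIRI("urn:example:p"), VF.createLiteral("v"));
            return null;
        });
    }
}
```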
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.tx.exception; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class WriteDeniedException extends RDF4JTransactionException { + public WriteDeniedException() { + } + + public WriteDeniedException(String message) { + super(message); + } + + public WriteDeniedException(String message, Throwable cause) { + super(message, cause); + } + + public WriteDeniedException(Throwable cause) { + super(cause); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/package-info.java new file mode 100644 index 00000000000..4ded52a1ba1 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/tx/package-info.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * + * + *

Rdf4J-Spring Tx
+ * + * Automatically configures spring transaction handling via {@link org.eclipse.rdf4j.spring.RDF4JConfig Rdf4JConfig}. + * + *
+ * To enable, set rdf4j.spring.tx.enabled=true + * + *
+ * If enabled, @{@link org.springframework.transaction.annotation.Transactional @Transactional} annotations and Spring's + * {@link org.springframework.transaction.support.TransactionTemplate TransactionTemplate} can be used to configure + * transactionality of Rdf4J repository accesses. + * + *
+ * Beware: suspending transactions is not supported. + * + * @since 4.0.0 + * @author Florian Kleedorfer + * + */ + +package org.eclipse.rdf4j.spring.tx; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/ObjectMapUtils.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/ObjectMapUtils.java new file mode 100644 index 00000000000..3b6f0263852 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/ObjectMapUtils.java @@ -0,0 +1,64 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.util; + +import static org.eclipse.rdf4j.spring.util.TypeMappingUtils.toBoolean; +import static org.eclipse.rdf4j.spring.util.TypeMappingUtils.toBooleanMaybe; +import static org.eclipse.rdf4j.spring.util.TypeMappingUtils.toBooleanOptional; +import static org.eclipse.rdf4j.spring.util.TypeMappingUtils.toIRI; +import static org.eclipse.rdf4j.spring.util.TypeMappingUtils.toIRIMaybe; + +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +import org.eclipse.rdf4j.model.IRI; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ObjectMapUtils { + public static IRI getIRI(Map map, String key) { + return toIRI((String) map.get(key)); + } + + public static IRI getIRIMaybe(Map map, String key) { + return toIRIMaybe((String) map.get(key)); + } + + public static Boolean getBoolean(Map map, String key) { + return toBoolean((String) map.get(key)); + } + + public static Boolean getBooleanMaybe(Map map, String key) { + return toBooleanMaybe((Boolean) map.get(key)); + } + + public static Optional getBooleanOptional(Map map, String key) { + return toBooleanOptional((Boolean) map.get(key)); + } + + public static String getString(Map map, String key) { + String value = (String) map.get(key); + Objects.requireNonNull(value); + return value; + } + + public static String getStringMaybe(Map map, String key) { + return (String) map.get(key); + } + + public static Optional getStringOptional(Map map, String key) { + return Optional.ofNullable(getStringMaybe(map, key)); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/QueryResultUtils.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/QueryResultUtils.java new file mode 100644 index 00000000000..ac707c8559a --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/QueryResultUtils.java @@ -0,0 +1,131 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
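ObjectMapUtils above is a thin layer for pulling typed values out of a loosely typed map (the map's generic parameters were lost in this rendering of the diff; Map<String, Object> is assumed below). A usage sketch with invented keys:

```java
import java.util.Map;

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.spring.util.ObjectMapUtils;

// Hypothetical reader, not part of the patch.
public class ArtistSettingsReader {

    public void read(Map<String, Object> settings) {
        IRI artist = ObjectMapUtils.getIRI(settings, "artistId");               // requires a String value, throws if missing
        IRI museum = ObjectMapUtils.getIRIMaybe(settings, "museumId");           // null if the key is absent
        Boolean featured = ObjectMapUtils.getBooleanMaybe(settings, "featured"); // expects a Boolean value
        String label = ObjectMapUtils.getStringOptional(settings, "label").orElse("unnamed");
        System.out.printf("artist=%s museum=%s featured=%s label=%s%n", artist, museum, featured, label);
    }
}
```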
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.util; + +import java.util.Optional; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Experimental +public class QueryResultUtils { + + public static Optional getValueOptional(BindingSet resultRow, String varName) { + return Optional.ofNullable(resultRow.getValue(varName)); + } + + public static Optional getValueOptional(BindingSet resultRow, Variable var) { + return getValueOptional(resultRow, var.getVarName()); + } + + public static Value getValueMaybe(BindingSet resultRow, String varName) { + return getValueOptional(resultRow, varName).orElse(null); + } + + public static Value getValueMaybe(BindingSet resultRow, Variable var) { + return getValueMaybe(resultRow, var.getVarName()); + } + + public static Value getValue(BindingSet resultRow, String varName) { + return getValueOptional(resultRow, varName) + .orElseThrow( + () -> new IllegalStateException( + String.format( + "BindingSet does not contain binding for variable %s", + varName))); + } + + public static Value getValue(BindingSet resultRow, Variable var) { + return getValue(resultRow, var.getVarName()); + } + + public static IRI getIRI(BindingSet resultRow, Variable var) { + return TypeMappingUtils.toIRI(getValue(resultRow, var)); + } + + public static IRI getIRI(BindingSet resultRow, String varName) { + return TypeMappingUtils.toIRI(getValue(resultRow, varName)); + } + + public static Optional getIRIOptional(BindingSet resultRow, String varName) { + return getValueOptional(resultRow, varName).map(TypeMappingUtils::toIRI); + } + + public static Optional getIRIOptional(BindingSet resultRow, Variable var) { + return getValueOptional(resultRow, var.getVarName()).map(TypeMappingUtils::toIRI); + } + + public static IRI getIRIMaybe(BindingSet resultRow, String varName) { + return getIRIOptional(resultRow, varName).orElse(null); + } + + public static IRI getIRIMaybe(BindingSet resultRow, Variable var) { + return getIRIMaybe(resultRow, var.getVarName()); + } + + public static String getString(BindingSet resultRow, Variable var) { + return getValue(resultRow, var).stringValue(); + } + + public static String getString(BindingSet resultRow, String varName) { + return getValue(resultRow, varName).stringValue(); + } + + public static Optional getStringOptional( + BindingSet resultRow, Variable var) { + return getValueOptional(resultRow, var).map(Value::stringValue); + } + + public static Optional getStringOptional(BindingSet resultRow, String varName) { + return getValueOptional(resultRow, varName).map(Value::stringValue); + } + + public static String getStringMaybe(BindingSet resultRow, String varName) { + return getStringOptional(resultRow, varName).orElse(null); + } + + public static String getStringMaybe(BindingSet resultRow, Variable var) { + return getStringMaybe(resultRow, var.getVarName()); + } + + public static Boolean getBoolean(BindingSet resultRow, Variable var) { + return TypeMappingUtils.toBoolean(getValue(resultRow, var)); + } + + public static Boolean getBoolean(BindingSet resultRow, String varName) { + return TypeMappingUtils.toBoolean(getValue(resultRow, varName)); + } + + public static Optional getBooleanOptional( + 
BindingSet resultRow, Variable var) { + return getValueOptional(resultRow, var).map(TypeMappingUtils::toBoolean); + } + + public static Optional getBooleanOptional(BindingSet resultRow, String varName) { + return getValueOptional(resultRow, varName).map(TypeMappingUtils::toBoolean); + } + + public static Boolean getBooleanMaybe(BindingSet resultRow, String varName) { + return getBooleanOptional(resultRow, varName).orElse(null); + } + + public static Boolean getBooleanMaybe(BindingSet resultRow, Variable var) { + return getBooleanMaybe(resultRow, var.getVarName()); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/RepositoryConnectionWrappingUtils.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/RepositoryConnectionWrappingUtils.java new file mode 100644 index 00000000000..da020f47272 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/RepositoryConnectionWrappingUtils.java @@ -0,0 +1,67 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.util; + +import java.lang.invoke.MethodHandles; +import java.util.Optional; +import java.util.function.Function; + +import org.eclipse.rdf4j.repository.DelegatingRepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RepositoryConnectionWrappingUtils { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + + public static RepositoryConnection wrapOnce( + RepositoryConnection con, + Function wrapper, + Class wrapperClass) { + if (!isWrapped(con, wrapperClass)) { + logger.debug( + "connection is not wrapped in {}, wrapping it", wrapperClass.getSimpleName()); + return wrapper.apply(con); + } else { + logger.debug( + "connection is already wrapped in {}, not wrapping it", + wrapperClass.getSimpleName()); + } + return con; + } + + public static boolean isWrapped(RepositoryConnection con, Class wrapperClass) { + return findWrapper(con, wrapperClass).isPresent(); + } + + public static Optional findWrapper(RepositoryConnection con, Class wrapperClass) { + if (wrapperClass.isInstance(con)) { + return Optional.of((T) con); + } + if (con instanceof DelegatingRepositoryConnection) { + return findWrapper(((DelegatingRepositoryConnection) con).getDelegate(), wrapperClass); + } + return Optional.empty(); + } + + public static RepositoryConnection findRoot(RepositoryConnection con) { + if (con instanceof DelegatingRepositoryConnection) { + return findRoot(((DelegatingRepositoryConnection) con).getDelegate()); + } else { + return con; + } + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/TypeMappingUtils.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/TypeMappingUtils.java new file mode 100644 index 00000000000..609fe7cf4d9 --- /dev/null +++ 
b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/util/TypeMappingUtils.java @@ -0,0 +1,114 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.util; + +import java.math.BigInteger; +import java.util.Collection; +import java.util.List; +import java.util.Objects; +import java.util.Optional; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Literal; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.sparqlbuilder.rdf.Iri; +import org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class TypeMappingUtils { + public static Iri toIri(IRI from) { + return Rdf.iri(from.toString()); + } + + public static List toIri(Collection from) { + return from.stream().map(e -> Rdf.iri(e.toString())).collect(Collectors.toList()); + } + + public static Iri[] toIriArray(Collection from) { + return toIri(from).toArray(new Iri[from.size()]); + } + + public static IRI toIRI(String from) { + return toIRIOptional(from) + .orElseThrow(() -> new NullPointerException("iriString must not be null")); + } + + public static IRI toIRIMaybe(String from) { + return toIRIOptional(from).orElse(null); + } + + public static Optional toIRIOptional(String from) { + return Optional.ofNullable(from).map(s -> SimpleValueFactory.getInstance().createIRI(s)); + } + + public static IRI toIRI(Value from) { + return SimpleValueFactory.getInstance().createIRI(from.stringValue()); + } + + public static Boolean toBoolean(Value from) { + return ((Literal) from).booleanValue(); + } + + public static Boolean toBooleanMaybe(Value from) { + return (from == null) ? null : toBoolean(from); + } + + public static Optional toBooleanOptional(Value from) { + return Optional.ofNullable(from).map(TypeMappingUtils::toBoolean); + } + + public static Boolean toBoolean(String from) { + Objects.requireNonNull(from); + return Boolean.valueOf(from); + } + + public static Boolean toBooleanMaybe(String from) { + return (from == null) ? 
null : toBoolean(from); + } + + public static Boolean toBooleanMaybe(Boolean from) { + return from; + } + + public static Optional toBooleanOptional(String from) { + return Optional.ofNullable(from).map(TypeMappingUtils::toBoolean); + } + + public static Optional toBooleanOptional(Boolean from) { + return Optional.ofNullable(from); + } + + public static Double toDouble(Value from) { + return ((Literal) from).doubleValue(); + } + + public static BigInteger toInteger(Value from) { + return ((Literal) from).integerValue(); + } + + public static Integer toInt(Value from) { + return ((Literal) from).intValue(); + } + + public static List toIRI(Collection from) { + return from.stream().map(TypeMappingUtils::toIRI).collect(Collectors.toList()); + } + + public static final String toString(Value from) { + return from.toString(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSource.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSource.java new file mode 100644 index 00000000000..542ebdb7e19 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSource.java @@ -0,0 +1,40 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.noveltychecking; + +import java.util.UUID; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.support.UUIDSource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class NoveltyCheckingUUIDSource implements UUIDSource { + @Autowired + private RDF4JTemplate rdf4JTemplate; + + @Override + public IRI nextUUID() { + return rdf4JTemplate.applyToConnection( + con -> { + IRI newId; + do { + newId = toURNUUID(UUID.randomUUID().toString()); + } while (con.hasStatement(newId, null, null, true)); + return newId; + }); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSourceConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSourceConfig.java new file mode 100644 index 00000000000..9e28b52f97c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSourceConfig.java @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
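QueryResultUtils (backed by the TypeMappingUtils conversions above) is meant for mapping SPARQL result rows to domain values in DAO code: the plain getters throw when a binding is missing, while the *Maybe/*Optional variants tolerate absence. A sketch of a row mapper under that assumption; the variable names and the record are invented, and the Optional type parameters stripped from the diff rendering above are assumed to be Optional<Value>, Optional<IRI>, and so on:

```java
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.spring.util.QueryResultUtils;

// Hypothetical mapper, not part of the patch: one result row -> one value object.
public class PaintingRowMapper {

    /** Carrier for the bindings ?painting ?title ?onDisplay of a hypothetical query. */
    public record PaintingRow(IRI painting, String title, Boolean onDisplay) {
    }

    public PaintingRow map(BindingSet row) {
        IRI painting = QueryResultUtils.getIRI(row, "painting");                // throws if the binding is absent
        String title = QueryResultUtils.getString(row, "title");
        Boolean onDisplay = QueryResultUtils.getBooleanMaybe(row, "onDisplay"); // null if unbound
        return new PaintingRow(painting, title, onDisplay);
    }
}
```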
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.noveltychecking; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty(prefix = "rdf4j.spring.uuidsource.noveltychecking", name = "enabled") +@EnableConfigurationProperties(NoveltyCheckingUUIDSourceProperties.class) +public class NoveltyCheckingUUIDSourceConfig { + @Bean + public NoveltyCheckingUUIDSource getUUIDSource() { + return new NoveltyCheckingUUIDSource(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSourceProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSourceProperties.java new file mode 100644 index 00000000000..767840fb49b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/noveltychecking/NoveltyCheckingUUIDSourceProperties.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.noveltychecking; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.uuidsource.noveltychecking") +public class NoveltyCheckingUUIDSourceProperties { + private boolean enabled; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/package-info.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/package-info.java new file mode 100644 index 00000000000..1f222c081fe --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/package-info.java @@ -0,0 +1,39 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +/** + * This package contains different approaches for generating UUIDs. 
One of them always generates the same sequence of + * UUIDs after bean initialization, which is useful for tests. Three of them obtain them from a + * {@link org.eclipse.rdf4j.repository.Repository Repository}, guaranteeing their uniqueness. Due to the very, very, + * very low probability of a collision, it is recommended not to use any of the latter and instead rely on the one + * instantiated by default, {@link org.eclipse.rdf4j.spring.support.DefaultUUIDSource DefaultUUIDSource}. + * + *

    + *
+ * 1. {@link org.eclipse.rdf4j.spring.uuidsource.predictable.PredictableUUIDSource PredictableUUIDSource}: Always
+ * generates the same sequence of UUIDs.
+ * 2. {@link org.eclipse.rdf4j.spring.uuidsource.noveltychecking.NoveltyCheckingUUIDSource NoveltyCheckingUUIDSource}:
+ * Generates a {@link java.util.UUID UUID} locally using {@link java.util.UUID#randomUUID() UUID.randomUUID()} and then
+ * asks the repository whether the UUID is unique. Enable with
+ * rdf4j.spring.uuidsource.noveltychecking.enabled=true
+ * 3. {@link org.eclipse.rdf4j.spring.uuidsource.simple.SimpleRepositoryUUIDSource SimpleRepositoryUUIDSource}: Asks the
+ * repository for a new UUID each time one is needed. Enable with
+ * rdf4j.spring.uuidsource.simple.enabled=true
+ * 4. {@link org.eclipse.rdf4j.spring.uuidsource.sequence.UUIDSequence UUIDSequence}: When a UUID is needed, asks the
+ * repository for a large batch of UUIDs and hands them out one at a time (see the configuration example below).
+ * Enable with rdf4j.spring.uuidsource.sequence.enabled=true
+ *
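+ *
+ * For example, to switch from the default to the prefetching UUIDSequence, set the corresponding properties in
+ * application.properties. The prefixes and the enabled/prefetchCount property names come from the configuration
+ * classes in this package; prefetch-count is the relaxed-binding spelling of prefetchCount and is optional (it
+ * defaults to 1000):
+ *
+ *   rdf4j.spring.uuidsource.sequence.enabled=true
+ *   rdf4j.spring.uuidsource.sequence.prefetch-count=1000
+ *
+ * Note that UUIDSequence treats prefetchCount as approximate: it builds its batch query from three VALUES clauses of
+ * equal size, so the configured value is rounded up to the nearest perfect cube (for example, 100 becomes
+ * 5 x 5 x 5 = 125, while 1000 is used as-is).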
+ * + * Only one of these approaches can be activated. + * + * @since 4.0.0 + * @author Florian Kleedorfer + */ +package org.eclipse.rdf4j.spring.uuidsource; diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSource.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSource.java new file mode 100644 index 00000000000..94bd16b1cd6 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSource.java @@ -0,0 +1,41 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.predictable; + +import java.util.UUID; +import java.util.concurrent.atomic.AtomicLong; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.support.UUIDSource; + +/** + * UUID source that generates the same sequence of UUIDs by counting up a long counter and using that as + * the value for generating a UUID. Useful for unit tests as newly generated entities will receive the same UUIDs each + * time the tests are executed. + * + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class PredictableUUIDSource implements UUIDSource { + + private final AtomicLong counter = new AtomicLong(0); + + public PredictableUUIDSource() { + } + + @Override + public IRI nextUUID() { + long value = counter.incrementAndGet(); + return toURNUUID(UUID.nameUUIDFromBytes(Long.toString(value).getBytes()).toString()); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSourceConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSourceConfig.java new file mode 100644 index 00000000000..7017a1d27b1 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSourceConfig.java @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.predictable; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@EnableConfigurationProperties(PredictableUUIDSourceProperties.class) +@ConditionalOnProperty(prefix = "rdf4j.spring.uuidsource.predictable", name = "enabled") +public class PredictableUUIDSourceConfig { + @Bean + public PredictableUUIDSource getUUIDSource() { + return new PredictableUUIDSource(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSourceProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSourceProperties.java new file mode 100644 index 00000000000..3d48092feb6 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/predictable/PredictableUUIDSourceProperties.java @@ -0,0 +1,33 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.predictable; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.uuidsource.predictable") +public class PredictableUUIDSourceProperties { + + private boolean enabled; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequence.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequence.java new file mode 100644 index 00000000000..1b664de89a4 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequence.java @@ -0,0 +1,105 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.sequence; + +import static org.eclipse.rdf4j.spring.util.QueryResultUtils.getIRI; + +import java.lang.invoke.MethodHandles; +import java.util.ArrayDeque; +import java.util.Collections; +import java.util.Map; +import java.util.Queue; +import java.util.WeakHashMap; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.support.UUIDSource; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class UUIDSequence implements UUIDSource { + private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass()); + private @Autowired RDF4JTemplate rdf4JTemplate; + private final int prefetchCount; + private final Map> prefetchedUUIDs = Collections + .synchronizedMap(new WeakHashMap<>()); + + public UUIDSequence(UUIDSequenceProperties properties) { + this.prefetchCount = properties.getPrefetchCount(); + logger.debug("UUIDSequence uses prefetchCount of {}", prefetchCount); + } + + @Override + public IRI nextUUID() { + if (logger.isDebugEnabled()) { + logger.debug("Obtaining UUID from UUIDSequence..."); + } + return rdf4JTemplate.applyToConnection( + con -> { + Queue uuids = prefetchedUUIDs.computeIfAbsent(con, this::prefetchUUIDs); + IRI uuid = uuids.poll(); + if (uuid == null) { + uuids = prefetchUUIDs(con); + prefetchedUUIDs.put(con, uuids); + uuid = uuids.poll(); + } + if (uuid == null) { + throw new IllegalStateException("Unable to produce next UUID in sequence"); + } + if (logger.isDebugEnabled()) { + logger.debug("Returning next UUID"); + } + return uuid; + }); + } + + private Queue prefetchUUIDs(RepositoryConnection con) { + double nd = Math.pow(prefetchCount, 1d / 3d); + int n = (int) Math.ceil(nd); + int exactPrefetchCount = (int) Math.ceil(Math.pow(n, 3d)); + if (logger.isDebugEnabled()) { + logger.debug("prefetching {} uuids from the repostory", exactPrefetchCount); + } + String ints = IntStream.range(0, n).mapToObj(Integer::toString).collect(Collectors.joining(" ")); + TupleQuery query = con.prepareTupleQuery( + "SELECT (UUID() as ?id) WHERE {" + + "VALUES ?index1 { " + + ints + + " } " + + "VALUES ?index2 { " + + ints + + " } " + + "VALUES ?index3 { " + + ints + + " } " + + "}"); + ArrayDeque uuids = new ArrayDeque<>(exactPrefetchCount); + try (TupleQueryResult result = query.evaluate()) { + while (result.hasNext()) { + BindingSet b = result.next(); + uuids.add(getIRI(b, "id")); + } + } + return uuids; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequenceConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequenceConfig.java new file mode 100644 index 00000000000..124eaec30f5 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequenceConfig.java @@ -0,0 +1,35 @@ 
+/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.sequence; + +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +import jakarta.validation.Valid; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@EnableConfigurationProperties(UUIDSequenceProperties.class) +@ConditionalOnProperty(prefix = "rdf4j.spring.uuidsource.sequence", name = "enabled") +public class UUIDSequenceConfig { + @Bean + public UUIDSequence getUUIDSource(@Valid @Autowired UUIDSequenceProperties properties) { + return new UUIDSequence(properties); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequenceProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequenceProperties.java new file mode 100644 index 00000000000..30686bc2615 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/sequence/UUIDSequenceProperties.java @@ -0,0 +1,48 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.sequence; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +import jakarta.validation.constraints.Min; +import jakarta.validation.constraints.NotBlank; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.uuidsource.sequence") +public class UUIDSequenceProperties { + + private boolean enabled; + + // Approximate number of UUIDs to prefetch from the repository + @NotBlank + @Min(value = 8, message = "Value must be 8 or higher!") + private int prefetchCount = 1000; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + public int getPrefetchCount() { + return prefetchCount; + } + + public void setPrefetchCount(int prefetchCount) { + this.prefetchCount = prefetchCount; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSource.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSource.java new file mode 100644 index 00000000000..56dffc139ad --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSource.java @@ -0,0 +1,36 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.simple; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.support.UUIDSource; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class SimpleRepositoryUUIDSource implements UUIDSource { + + @Autowired + RDF4JTemplate rdf4JTemplate; + + @Override + public IRI nextUUID() { + return rdf4JTemplate + .tupleQuery("SELECT (UUID() as ?id) WHERE {}") + .evaluateAndConvert() + .toSingleton(b -> QueryResultUtils.getIRI(b, "id")); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSourceConfig.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSourceConfig.java new file mode 100644 index 00000000000..28bdfd20b7e --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSourceConfig.java @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.simple; + +import org.springframework.boot.autoconfigure.AutoConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.boot.context.properties.EnableConfigurationProperties; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@AutoConfiguration +@ConditionalOnProperty(prefix = "rdf4j.spring.uuidsource.simple", name = "enabled") +@EnableConfigurationProperties(SimpleRepositoryUUIDSourceProperties.class) +public class SimpleRepositoryUUIDSourceConfig { + @Bean + public SimpleRepositoryUUIDSource getSimpleRepositoryUUIDSource() { + return new SimpleRepositoryUUIDSource(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSourceProperties.java b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSourceProperties.java new file mode 100644 index 00000000000..4cca28c96f5 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/java/org/eclipse/rdf4j/spring/uuidsource/simple/SimpleRepositoryUUIDSourceProperties.java @@ -0,0 +1,34 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.uuidsource.simple; + +import org.springframework.boot.context.properties.ConfigurationProperties; + +import jakarta.validation.constraints.NotBlank; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@ConfigurationProperties(prefix = "rdf4j.spring.uuidsource.simple") +public class SimpleRepositoryUUIDSourceProperties { + @NotBlank + private boolean enabled = false; + + public boolean isEnabled() { + return enabled; + } + + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } +} diff --git a/spring6-components/rdf4j-spring6/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/spring6-components/rdf4j-spring6/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports new file mode 100644 index 00000000000..771b07c89ae --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -0,0 +1,13 @@ +org.eclipse.rdf4j.spring.RDF4JConfig +org.eclipse.rdf4j.spring.operationlog.OperationLogConfig +org.eclipse.rdf4j.spring.operationlog.log.jmx.OperationLogJmxConfig +org.eclipse.rdf4j.spring.operationcache.OperationCacheConfig +org.eclipse.rdf4j.spring.pool.PoolConfig +org.eclipse.rdf4j.spring.repository.remote.RemoteRepositoryConfig +org.eclipse.rdf4j.spring.repository.inmemory.InMemoryRepositoryConfig +org.eclipse.rdf4j.spring.resultcache.ResultCacheConfig +org.eclipse.rdf4j.spring.tx.TxConfig +org.eclipse.rdf4j.spring.uuidsource.noveltychecking.NoveltyCheckingUUIDSourceConfig +org.eclipse.rdf4j.spring.uuidsource.sequence.UUIDSequenceConfig +org.eclipse.rdf4j.spring.uuidsource.simple.SimpleRepositoryUUIDSourceConfig +org.eclipse.rdf4j.spring.uuidsource.predictable.PredictableUUIDSourceConfig diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/BasicTests.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/BasicTests.java new file mode 100644 index 00000000000..da4bd101b0b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/BasicTests.java @@ -0,0 +1,53 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring; + +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.eclipse.rdf4j.spring.util.TypeMappingUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class BasicTests extends RDF4JSpringTestBase { + @Autowired + RDF4JTemplate rdf4JTemplate; + + @Test + public void testIsTemplateWired() { + Assertions.assertNotNull(rdf4JTemplate); + } + + @Test + void testTripleCount() { + int count = rdf4JTemplate + .tupleQuery("SELECT (count(?a) as ?cnt) WHERE { ?a ?b ?c}") + .evaluateAndConvert() + .toSingleton(bs -> TypeMappingUtils.toInt( + QueryResultUtils.getValue(bs, "cnt"))); + if (count != 26) { + Model model = rdf4JTemplate.graphQuery("CONSTRUCT { ?a ?b ?c } WHERE { ?a ?b ?c }") + .evaluateAndConvert() + .toModel(); + Rio.write(model, System.out, RDFFormat.TURTLE); + } + Assertions.assertEquals(26, count); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/RDF4JSpringTestBase.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/RDF4JSpringTestBase.java new file mode 100644 index 00000000000..4eea9018079 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/RDF4JSpringTestBase.java @@ -0,0 +1,50 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring; + +import org.eclipse.rdf4j.spring.support.DataInserter; +import org.junit.jupiter.api.BeforeAll; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.io.Resource; +import org.springframework.test.annotation.DirtiesContext; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; +import org.springframework.transaction.annotation.Transactional; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Transactional +@SpringJUnitConfig(classes = { TestConfig.class }) +@TestPropertySource("classpath:application.properties") +@TestPropertySource( + properties = { + "rdf4j.spring.repository.inmemory.enabled=true", + "rdf4j.spring.repository.inmemory.use-shacl-sail=true", + "rdf4j.spring.tx.enabled=true", + "rdf4j.spring.resultcache.enabled=false", + "rdf4j.spring.operationcache.enabled=false", + "rdf4j.spring.pool.enabled=true", + "rdf4j.spring.pool.max-connections=2" + + }) +@DirtiesContext +public class RDF4JSpringTestBase { + @BeforeAll + public static void insertTestData( + @Autowired DataInserter dataInserter, + @Value("classpath:/data/example-data-artists-copy.ttl") Resource dataFile) { + dataInserter.insertData(dataFile); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/TestConfig.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/TestConfig.java new file mode 100644 index 00000000000..63c4692c463 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/TestConfig.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring; + +import org.eclipse.rdf4j.spring.support.DataInserter; +import org.eclipse.rdf4j.spring.test.RDF4JTestConfig; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Import; +import org.springframework.transaction.annotation.EnableTransactionManagement; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@TestConfiguration +@EnableTransactionManagement +@Import(RDF4JTestConfig.class) +@ComponentScan(basePackages = "org.eclipse.rdf4j.spring.domain") +public class TestConfig { + @Bean + DataInserter getDataInserter() { + return new DataInserter(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/RDF4JCrudDaoTests.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/RDF4JCrudDaoTests.java new file mode 100644 index 00000000000..c8afa8e29b0 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/RDF4JCrudDaoTests.java @@ -0,0 +1,97 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao; + +import java.util.Optional; +import java.util.stream.Stream; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.spring.RDF4JSpringTestBase; +import org.eclipse.rdf4j.spring.domain.dao.ArtistDao; +import org.eclipse.rdf4j.spring.domain.model.Artist; +import org.eclipse.rdf4j.spring.domain.model.EX; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RDF4JCrudDaoTests extends RDF4JSpringTestBase { + @Autowired + private ArtistDao artistDao; + + @ParameterizedTest + @MethodSource + public void testRead(Artist artist) { + Artist artistFromDb = artistDao.getById(artist.getId()); + Assertions.assertEquals(artist.getFirstName(), artistFromDb.getFirstName()); + Assertions.assertEquals(artist.getLastName(), artistFromDb.getLastName()); + } + + public static Stream testRead() { + Artist picasso = new Artist(); + picasso.setFirstName("Pablo"); + picasso.setLastName("Picasso"); + picasso.setId(SimpleValueFactory.getInstance().createIRI("http://example.org/Picasso")); + Artist vanGogh = new Artist(); + vanGogh.setFirstName("Vincent"); + vanGogh.setLastName("van Gogh"); + vanGogh.setId(SimpleValueFactory.getInstance().createIRI("http://example.org/VanGogh")); + return Stream.of(picasso, vanGogh); + } + + @Test + public void testInsertThenRead() { + IRI id = EX.of("Vermeer"); + Artist a = new Artist(); + a.setId(id); + 
a.setLastName("Vermeer"); + a.setFirstName("Jan"); + artistDao.save(a); + Artist artistfromDb = artistDao.getById(id); + Assertions.assertEquals(a.getLastName(), artistfromDb.getLastName()); + Assertions.assertEquals(a.getFirstName(), artistfromDb.getFirstName()); + Assertions.assertEquals(a.getId(), artistfromDb.getId()); + } + + @Test + public void testModify() { + Artist a = artistDao.getById(EX.of("Picasso")); + a.setFirstName("Pablo Ruiz"); + artistDao.save(a); + Artist artistFromDb = artistDao.getById(EX.of("Picasso")); + Assertions.assertEquals(a.getLastName(), artistFromDb.getLastName()); + } + + @Test + public void testDelete() { + artistDao.delete(EX.of("Picasso")); + Optional a = artistDao.getByIdOptional(EX.of("Picasso")); + Assertions.assertTrue(a.isEmpty()); + } + + @Test + public void testInsertWithUUID() { + Artist a = new Artist(); + a.setFirstName("Munch"); + a.setLastName("Edvard"); + a = artistDao.save(a); + Assertions.assertNotNull(a.getId()); + Assertions.assertTrue(a.getId().toString().startsWith("urn:uuid:")); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/support/ServiceLayerTests.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/support/ServiceLayerTests.java new file mode 100644 index 00000000000..9cfc3b007d3 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/support/ServiceLayerTests.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import org.eclipse.rdf4j.spring.RDF4JSpringTestBase; +import org.eclipse.rdf4j.spring.domain.model.Artist; +import org.eclipse.rdf4j.spring.domain.model.Painting; +import org.eclipse.rdf4j.spring.domain.service.ArtService; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.tx.TransactionObject; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.transaction.PlatformTransactionManager; +import org.springframework.transaction.support.DefaultTransactionStatus; +import org.springframework.transaction.support.TransactionTemplate; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class ServiceLayerTests extends RDF4JSpringTestBase { + @Autowired + private ArtService artService; + @Autowired + private RDF4JTemplate rdf4JTemplate; + @Autowired + private PlatformTransactionManager transactionManager; + private TransactionTemplate transactionTemplate; + + @BeforeEach + void setUp() { + transactionTemplate = new TransactionTemplate(transactionManager); + } + + @Test + public void testCreateArtist() { + Artist artist = artService.createArtist("Jan", "Vermeer"); + assertNotNull(artist.getId()); + assertTrue(artist.getId().toString().startsWith("urn:uuid")); + } + + @Test + public void testCreatePainting() { + Artist artist = artService.createArtist("Jan", "Vermeer"); + Painting painting = artService.createPainting("Girl with a pearl earring", "oil on canvas", artist.getId()); + assertNotNull(painting.getId()); + assertTrue(painting.getId().toString().startsWith("urn:uuid")); + } + + @Test + public void testCreatePaintingWithoutArtist() { + assertThrows(NullPointerException.class, () -> artService.createPainting( + "Girl with a pearl earring", + "oil on canvas", + null)); + } + + // TODO + @Test + public void testRollbackOnException() { + transactionTemplate.execute(status -> { + Artist artist = artService.createArtist("Jan", "Vermeer"); + // make sure we can query vermeer from the db + assertEquals(1, + rdf4JTemplate.tupleQueryFromResource( + getClass(), + "classpath:sparql/get-artists.rq") + .withBinding("artist", artist.getId()) + .evaluateAndConvert() + .toSet(BindingSetMapper.identity()) + .size()); + // now ascertain that the transaction will commit eventually + assertFalse(status.isRollbackOnly()); + // now insert a painting without artist (throws exception) + assertThrows(NullPointerException.class, () -> artService.createPainting( + "Girl with a pearl earring", + "oil on canvas", + null)); + // now ascertain that the transaction will not commit because of the exception + assertTrue(((TransactionObject) ((DefaultTransactionStatus) status).getTransaction()).isRollbackOnly()); + return null; + }); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryResultConverterTests.java 
b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryResultConverterTests.java new file mode 100644 index 00000000000..c4f173c6eaf --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/dao/support/operation/TupleQueryResultConverterTests.java @@ -0,0 +1,855 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.dao.support.operation; + +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.spring.RDF4JSpringTestBase; +import org.eclipse.rdf4j.spring.dao.exception.IncorrectResultSetSizeException; +import org.eclipse.rdf4j.spring.domain.model.EX; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +public class TupleQueryResultConverterTests extends RDF4JSpringTestBase { + + @Autowired + private RDF4JTemplate rdf4JTemplate; + + private TupleQueryResultConverter resultConverter; + + @Test + public void testConsumeResultMultiple() { + TupleQueryResultConverter converter = forMultiple(); + converter.consumeResult(tqr -> { + Assertions.assertTrue(tqr.hasNext()); + Assertions.assertEquals(1, tqr.getBindingNames().size()); + Assertions.assertEquals(2, tqr.stream().count()); + }); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testConsumeResultSingle() { + TupleQueryResultConverter converter = forSingle(); + converter.consumeResult(tqr -> { + Assertions.assertTrue(tqr.hasNext()); + Assertions.assertEquals(1, tqr.getBindingNames().size()); + Assertions.assertEquals(1, tqr.stream().count()); + }); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testConsumeResultNothing() { + TupleQueryResultConverter converter = forNothing(); + converter.consumeResult(tqr -> { + Assertions.assertFalse(tqr.hasNext()); + Assertions.assertEquals(1, tqr.getBindingNames().size()); + Assertions.assertEquals(0, tqr.stream().count()); + }); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testApplyToResultMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.applyToResult(res -> res.stream() + .map(b -> QueryResultUtils.getIRI(b, "artist")) + .collect(Collectors.toSet())); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testApplyToResultSingle() { + TupleQueryResultConverter converter = forSingle(); + Set artists = 
converter.applyToResult(res -> res.stream() + .map(b -> QueryResultUtils.getIRI(b, "artist")) + .collect(Collectors.toSet())); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testApplyToResultNothing() { + TupleQueryResultConverter converter = forNothing(); + Set artists = converter.applyToResult(res -> res.stream() + .map(b -> QueryResultUtils.getIRI(b, "artist")) + .collect(Collectors.toSet())); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToStreamMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Set bindingSets = converter.toStream().collect(Collectors.toSet()); + Assertions.assertEquals(2, bindingSets.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToStreamSingle() { + TupleQueryResultConverter converter = forSingle(); + Set bindingSets = converter.toStream().collect(Collectors.toSet()); + Assertions.assertEquals(1, bindingSets.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToStreamNothing() { + TupleQueryResultConverter converter = forNothing(); + Set bindingSets = converter.toStream().collect(Collectors.toSet()); + Assertions.assertTrue(bindingSets.isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void toStream1Multiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toStream(bs -> QueryResultUtils.getIRI(bs, "artist")).collect(Collectors.toSet()); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void toStream1Single() { + TupleQueryResultConverter converter = forSingle(); + Set artists = converter.toStream(bs -> QueryResultUtils.getIRI(bs, "artist")).collect(Collectors.toSet()); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void toStream1Nothing() { + TupleQueryResultConverter converter = forNothing(); + Set artists = converter.toStream(bs -> QueryResultUtils.getIRI(bs, "artist")).collect(Collectors.toSet()); + Assertions.assertTrue(artists.isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void toStream2Multiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter + .toStream( + bs -> QueryResultUtils.getIRI(bs, "artist"), + Object::toString) + .collect(Collectors.toSet()); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso.toString())); + Assertions.assertTrue(artists.contains(EX.VanGogh.toString())); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void toStream2Single() { + TupleQueryResultConverter converter = forSingle(); + Set artists = converter + .toStream( + bs -> QueryResultUtils.getIRI(bs, "artist"), + Object::toString) + .collect(Collectors.toSet()); + Assertions.assertEquals(1, artists.size()); + 
Assertions.assertTrue(artists.contains(EX.Picasso.toString())); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void toStream2Nothing() { + TupleQueryResultConverter converter = forNothing(); + Set artists = converter + .toStream( + bs -> QueryResultUtils.getIRI(bs, "artist"), + Object::toString) + .collect(Collectors.toSet()); + Assertions.assertTrue(artists.isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeOfWholeResultMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toSingletonMaybeOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeOfWholeResultSingle() { + TupleQueryResultConverter converter = forSingle(); + Set artists = converter.toSingletonMaybeOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeOfWholeResultNothing() { + TupleQueryResultConverter converter = forNothing(); + Set artists = converter.toSingletonMaybeOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertNull(artists); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalOfWholeResultMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Optional> artists = converter.toSingletonOptionalOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertTrue(artists.isPresent()); + Assertions.assertEquals(2, artists.get().size()); + Assertions.assertTrue(artists.get().contains(EX.Picasso)); + Assertions.assertTrue(artists.get().contains(EX.VanGogh)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalOfWholeResultSingle() { + TupleQueryResultConverter converter = forSingle(); + Optional> artists = converter.toSingletonOptionalOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertEquals(1, artists.get().size()); + Assertions.assertTrue(artists.get().contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalOfWholeResultNothing() { + TupleQueryResultConverter converter = forNothing(); + Optional> artists = converter.toSingletonOptionalOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> 
QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertTrue(artists.isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOfWholeResultMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toSingletonOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOfWholeResultSingle() { + TupleQueryResultConverter converter = forSingle(); + Set artists = converter.toSingletonOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOfWholeResultNothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, () -> converter.toSingletonOfWholeResult(tqr -> { + if (!tqr.hasNext()) { + return null; + } + return tqr.stream() + .map(bs -> QueryResultUtils.getIRI(bs, "artist")) + .collect(Collectors.toSet()); + }) + ); + } + + @Test + public void testToSingletonMaybeMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingletonMaybe(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeMultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> null)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeSingle() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertEquals(EX.Picasso, converter.toSingletonMaybe(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeSingleToNull() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> null)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeNothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybeNothingToNull() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> null)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows( + 
IncorrectResultSetSizeException.class, + () -> converter.toSingletonOptional(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalMultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertTrue(converter.toSingletonOptional(bs -> null).isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalSingle() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertEquals(EX.Picasso, + converter.toSingletonOptional(bs -> QueryResultUtils.getIRI(bs, "artist")).get()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalSingleToNull() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertTrue(converter.toSingletonOptional(bs -> null).isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalNothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertTrue(converter.toSingletonOptional( + bs -> QueryResultUtils.getIRI(bs, "artist")) + .isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptionalNothingToNull() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertTrue(converter.toSingletonOptional(bs -> null).isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> null)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonSingle() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertEquals(EX.Picasso, converter.toSingleton(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonSingleToNull() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> null)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonNothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> QueryResultUtils.getIRI(bs, "artist"))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonNothingToNull() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> null)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void 
testToSingletonMaybe2Multiple() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingletonMaybe( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybe2MultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> null, IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybe2Single() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertEquals(EX.Picasso.toString(), converter.toSingletonMaybe( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybe2SingleToNull() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> null, IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybe2Nothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertNull(converter.toSingletonMaybe( + bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonMaybe2NothingToNull() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertNull(converter.toSingletonMaybe(bs -> null, IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptional2Multiple() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingletonOptional( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptional2MultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertTrue(converter.toSingletonOptional(bs -> null, IRI::toString).isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptional2Single() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertEquals(EX.Picasso.toString(), converter.toSingletonOptional( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString).get()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptional2SingleToNull() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertTrue(converter.toSingletonOptional(bs -> null, + IRI::toString).isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptional2Nothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertTrue(converter.toSingletonOptional( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString) + .isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingletonOptional2NothingToNull() { + TupleQueryResultConverter converter = 
forNothing(); + Assertions.assertTrue(converter.toSingletonOptional(bs -> null, + IRI::toString).isEmpty()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingleton2Multiple() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingleton( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingleton2MultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, + () -> converter.toSingleton( + bs -> null, + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingleton2Single() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertEquals(EX.Picasso.toString(), converter.toSingleton( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingleton2SingleToNull() { + TupleQueryResultConverter converter = forSingle(); + Assertions.assertThrows( + IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> null, IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingleton2Nothing() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, + () -> converter.toSingleton( + bs -> QueryResultUtils.getIRI(bs, "artist"), + IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSingleton2NothingToNull() { + TupleQueryResultConverter converter = forNothing(); + Assertions.assertThrows(IncorrectResultSetSizeException.class, + () -> converter.toSingleton(bs -> null, IRI::toString)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToListMultiple() { + TupleQueryResultConverter converter = forMultiple(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.containsAll(Set.of(EX.Picasso, EX.VanGogh))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToListMultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + List artists = converter.toList(bs -> null); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToListSingle() { + TupleQueryResultConverter converter = forSingle(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToListNothing() { + TupleQueryResultConverter converter = forNothing(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToList2Multiple() { + 
TupleQueryResultConverter converter = forMultiple(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.containsAll(Set.of(EX.Picasso.toString(), EX.VanGogh.toString()))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToList2MultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + List artists = converter.toList(bs -> null, IRI::toString); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToList2MultiplePostprocessToNull() { + TupleQueryResultConverter converter = forMultiple(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist"), x -> null); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToList2Single() { + TupleQueryResultConverter converter = forSingle(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso.toString())); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToList2Nothing() { + TupleQueryResultConverter converter = forNothing(); + List artists = converter.toList(bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSetMultiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.containsAll(Set.of(EX.Picasso, EX.VanGogh))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSetMultipleToNull() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toSet(bs -> null); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSetSingle() { + TupleQueryResultConverter converter = forSingle(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSetNothing() { + TupleQueryResultConverter converter = forNothing(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSet2Multiple() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.containsAll(Set.of(EX.Picasso.toString(), EX.VanGogh.toString()))); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSet2MultipleToNull() { + TupleQueryResultConverter converter = 
forMultiple(); + Set artists = converter.toSet(bs -> null, IRI::toString); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSet2MultiplePostprocessToNull() { + TupleQueryResultConverter converter = forMultiple(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist"), x -> null); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSet2Single() { + TupleQueryResultConverter converter = forSingle(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString); + Assertions.assertEquals(1, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso.toString())); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToSet2Nothing() { + TupleQueryResultConverter converter = forNothing(); + Set artists = converter.toSet(bs -> QueryResultUtils.getIRI(bs, "artist"), IRI::toString); + Assertions.assertEquals(0, artists.size()); + Assertions.assertThrows(NullPointerException.class, converter::toStream); + } + + @Test + public void testToMapOfSet() { + Map> paintings = rdf4JTemplate.tupleQueryFromResource( + getClass(), + "classpath:sparql/get-paintings-of-artist2.rq") + .evaluateAndConvert() + .toMapOfSet(bs -> QueryResultUtils.getIRI(bs, "artist"), + bs -> QueryResultUtils.getIRI(bs, "painting")); + Assertions.assertEquals(2, paintings.keySet().size()); + Assertions.assertNotNull(paintings.get(EX.Picasso)); + Assertions.assertNotNull(paintings.get(EX.VanGogh)); + Assertions.assertEquals(1, paintings.get(EX.Picasso).size()); + Assertions.assertEquals(3, paintings.get(EX.VanGogh).size()); + } + + /** + * TODO + * + * @Test public void testToMap() { + *

+ * } + * + * public Map> toMapOfList( Function keyMapper, Function + * valueMapper) { return resultConverter.toMapOfList(keyMapper, valueMapper); } + * + * public Map toMap(BindingSetMapper mapper, Function keyMapper, Function + * valueMapper) { return resultConverter.toMap(mapper, keyMapper, valueMapper); } + * + * public Map toMap( Function> entryMapper) { return + * resultConverter.toMap(entryMapper); } + * + * public Map> toMapOfSet( BindingSetMapper mapper, Function keyMapper, + * Function valueMapper) { return resultConverter.toMapOfSet(mapper, keyMapper, valueMapper); } + * + * public Map> toMapOfList( BindingSetMapper mapper, Function keyMapper, + * Function valueMapper) { return resultConverter.toMapOfList(mapper, keyMapper, valueMapper); } + *
+ * public Stream getBindingStream( TupleQueryResult result) { return + * resultConverter.getBindingStream(result); } + */ + + private TupleQueryResultConverter forMultiple() { + return rdf4JTemplate.tupleQueryFromResource( + getClass(), + "classpath:sparql/get-artists.rq").evaluateAndConvert(); + + } + + private TupleQueryResultConverter forSingle() { + return rdf4JTemplate.tupleQueryFromResource( + getClass(), + "classpath:sparql/get-artists.rq") + .withBinding("artist", EX.Picasso) + .evaluateAndConvert(); + + } + + private TupleQueryResultConverter forNothing() { + return rdf4JTemplate.tupleQueryFromResource( + getClass(), + "classpath:sparql/get-artists.rq") + .withBinding("artist", EX.of("Vermeer")) + .evaluateAndConvert(); + + } + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/dao/ArtistDao.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/dao/ArtistDao.java new file mode 100644 index 00000000000..bbe68a2738d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/dao/ArtistDao.java @@ -0,0 +1,96 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.domain.dao; + +import static org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf.iri; +import static org.eclipse.rdf4j.spring.domain.model.Artist.ARTIST_FIRST_NAME; +import static org.eclipse.rdf4j.spring.domain.model.Artist.ARTIST_ID; +import static org.eclipse.rdf4j.spring.domain.model.Artist.ARTIST_LAST_NAME; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.FOAF; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries; +import org.eclipse.rdf4j.spring.dao.SimpleRDF4JCRUDDao; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.MutableBindings; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.domain.model.Artist; +import org.eclipse.rdf4j.spring.domain.model.EX; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.springframework.stereotype.Component; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Component +public class ArtistDao extends SimpleRDF4JCRUDDao { + + public ArtistDao(RDF4JTemplate rdf4JTemplate) { + super(rdf4JTemplate); + } + + @Override + protected void populateIdBindings(MutableBindings bindingsBuilder, IRI iri) { + bindingsBuilder.add(ARTIST_ID, iri); + } + + @Override + protected void populateBindingsForUpdate(MutableBindings bindingsBuilder, Artist artist) { + bindingsBuilder + .add(ARTIST_FIRST_NAME, artist.getFirstName()) + .add(ARTIST_LAST_NAME, artist.getLastName()); + } + + @Override + protected NamedSparqlSupplierPreparer prepareNamedSparqlSuppliers(NamedSparqlSupplierPreparer preparer) { + return null; + } + + @Override + protected Artist mapSolution(BindingSet querySolution) { + Artist artist = new Artist(); + artist.setId(QueryResultUtils.getIRI(querySolution, 
ARTIST_ID)); + artist.setFirstName(QueryResultUtils.getString(querySolution, ARTIST_FIRST_NAME)); + artist.setLastName(QueryResultUtils.getString(querySolution, ARTIST_LAST_NAME)); + return artist; + } + + @Override + protected String getReadQuery() { + return "prefix foaf: " + + "prefix ex: " + + "SELECT ?artist_id ?artist_firstName ?artist_lastName where {" + + "?artist_id a ex:Artist; " + + " foaf:firstName ?artist_firstName; " + + " foaf:surname ?artist_lastName ." + + " } "; + } + + @Override + protected NamedSparqlSupplier getInsertSparql(Artist artist) { + return NamedSparqlSupplier.of("insert", () -> Queries.INSERT(ARTIST_ID.isA(iri(EX.Artist)) + .andHas(iri(FOAF.FIRST_NAME), ARTIST_FIRST_NAME) + .andHas(iri(FOAF.SURNAME), ARTIST_LAST_NAME)) + .getQueryString()); + } + + @Override + protected IRI getInputId(Artist artist) { + if (artist.getId() == null) { + return getRdf4JTemplate().getNewUUID(); + } + return artist.getId(); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/dao/PaintingDao.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/dao/PaintingDao.java new file mode 100644 index 00000000000..0cd1bd72034 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/dao/PaintingDao.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.domain.dao; + +import static org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf.iri; +import static org.eclipse.rdf4j.spring.domain.model.Painting.PAINTING_ARTIST_ID; +import static org.eclipse.rdf4j.spring.domain.model.Painting.PAINTING_ID; +import static org.eclipse.rdf4j.spring.domain.model.Painting.PAINTING_LABEL; +import static org.eclipse.rdf4j.spring.domain.model.Painting.PAINTING_TECHNIQUE; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.sparqlbuilder.core.query.Queries; +import org.eclipse.rdf4j.spring.dao.SimpleRDF4JCRUDDao; +import org.eclipse.rdf4j.spring.dao.support.bindingsBuilder.MutableBindings; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.domain.model.EX; +import org.eclipse.rdf4j.spring.domain.model.Painting; +import org.eclipse.rdf4j.spring.support.RDF4JTemplate; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.springframework.stereotype.Component; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@Component +public class PaintingDao extends SimpleRDF4JCRUDDao { + + public PaintingDao(RDF4JTemplate rdf4JTemplate) { + super(rdf4JTemplate); + } + + @Override + protected void populateIdBindings(MutableBindings bindingsBuilder, IRI iri) { + bindingsBuilder.add(PAINTING_ID, iri); + } + + @Override + protected NamedSparqlSupplierPreparer prepareNamedSparqlSuppliers(NamedSparqlSupplierPreparer preparer) { + return null; + } + + @Override + protected Painting mapSolution(BindingSet querySolution) { + 
Painting painting = new Painting(); + painting.setId(QueryResultUtils.getIRI(querySolution, PAINTING_ID)); + painting.setTechnique(QueryResultUtils.getString(querySolution, PAINTING_TECHNIQUE)); + painting.setTitle(QueryResultUtils.getString(querySolution, PAINTING_LABEL)); + painting.setArtistId(QueryResultUtils.getIRI(querySolution, PAINTING_ARTIST_ID)); + return painting; + } + + @Override + protected String getReadQuery() { + return Queries.SELECT(PAINTING_ID, PAINTING_LABEL, PAINTING_TECHNIQUE, PAINTING_ARTIST_ID) + .where( + PAINTING_ID.isA(iri(EX.Painting)) + .andHas(iri(EX.technique), PAINTING_TECHNIQUE) + .andHas(iri(RDFS.LABEL), PAINTING_LABEL), + PAINTING_ARTIST_ID.has(iri(EX.creatorOf), PAINTING_ID)) + .getQueryString(); + } + + @Override + protected NamedSparqlSupplier getInsertSparql(Painting painting) { + return NamedSparqlSupplier.of("insert", () -> Queries.INSERT( + PAINTING_ID.isA(iri(EX.Painting)) + .andHas(iri(EX.technique), PAINTING_TECHNIQUE) + .andHas(iri(RDFS.LABEL), PAINTING_LABEL), + PAINTING_ARTIST_ID.has(iri(EX.creatorOf), PAINTING_ID)) + .getQueryString()); + } + + @Override + protected void populateBindingsForUpdate(MutableBindings bindingsBuilder, Painting painting) { + bindingsBuilder + .add(PAINTING_LABEL, painting.getTitle()) + .add(PAINTING_TECHNIQUE, painting.getTechnique()) + .add(PAINTING_ARTIST_ID, painting.getArtistId()); + } + + @Override + protected IRI getInputId(Painting painting) { + if (painting.getId() == null) { + return getRdf4JTemplate().getNewUUID(); + } + return painting.getId(); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/Artist.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/Artist.java new file mode 100644 index 00000000000..785009b169e --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/Artist.java @@ -0,0 +1,53 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.domain.model; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class Artist { + public static final Variable ARTIST_ID = SparqlBuilder.var("artist_id"); + public static final Variable ARTIST_FIRST_NAME = SparqlBuilder.var("artist_firstName"); + public static final Variable ARTIST_LAST_NAME = SparqlBuilder.var("artist_lastName"); + private IRI id; + private String firstName; + private String lastName; + + public String getFirstName() { + return firstName; + } + + public void setFirstName(String firstName) { + this.firstName = firstName; + } + + public String getLastName() { + return lastName; + } + + public void setLastName(String lastName) { + this.lastName = lastName; + } + + public IRI getId() { + return id; + } + + public void setId(IRI id) { + this.id = id; + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/EX.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/EX.java new file mode 100644 index 00000000000..d68ef9b6d9d --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/EX.java @@ -0,0 +1,41 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.domain.model; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class EX { + private static final String base = "http://example.org/"; + public static final IRI Artist = SimpleValueFactory.getInstance().createIRI(base, "Artist"); + public static final IRI Gallery = SimpleValueFactory.getInstance().createIRI(base, "Gallery"); + public static final IRI Painting = SimpleValueFactory.getInstance().createIRI(base, "Painting"); + public static final IRI Picasso = SimpleValueFactory.getInstance().createIRI(base, "Picasso"); + public static final IRI VanGogh = SimpleValueFactory.getInstance().createIRI(base, "VanGogh"); + public static final IRI street = SimpleValueFactory.getInstance().createIRI(base, "street"); + public static final IRI city = SimpleValueFactory.getInstance().createIRI(base, "city"); + public static final IRI country = SimpleValueFactory.getInstance().createIRI(base, "country"); + public static final IRI creatorOf = SimpleValueFactory.getInstance().createIRI(base, "creatorOf"); + public static final IRI technique = SimpleValueFactory.getInstance().createIRI(base, "technique"); + public static final IRI starryNight = SimpleValueFactory.getInstance().createIRI(base, "starryNight"); + public static final IRI sunflowers = SimpleValueFactory.getInstance().createIRI(base, "sunflowers"); + public static final IRI potatoEaters = SimpleValueFactory.getInstance().createIRI(base, "potatoEaters"); + public static final IRI guernica = SimpleValueFactory.getInstance().createIRI(base, "guernica"); + + public static IRI of(String localName) { + return SimpleValueFactory.getInstance().createIRI(base, localName); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/Painting.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/Painting.java new file mode 100644 index 00000000000..0a556f218d8 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/model/Painting.java @@ -0,0 +1,64 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.domain.model; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.sparqlbuilder.core.SparqlBuilder; +import org.eclipse.rdf4j.sparqlbuilder.core.Variable; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class Painting { + public static final Variable PAINTING_ID = SparqlBuilder.var("painting_id"); + public static final Variable PAINTING_ARTIST_ID = SparqlBuilder.var("painting_artist_id"); + public static final Variable PAINTING_TECHNIQUE = SparqlBuilder.var("painting_technique"); + public static final Variable PAINTING_LABEL = SparqlBuilder.var("painting_label"); + + private IRI id; + private String title; + private String technique; + private IRI artistId; + + public IRI getId() { + return id; + } + + public void setId(IRI id) { + this.id = id; + } + + public String getTitle() { + return title; + } + + public void setTitle(String title) { + this.title = title; + } + + public String getTechnique() { + return technique; + } + + public void setTechnique(String technique) { + this.technique = technique; + } + + public IRI getArtistId() { + return artistId; + } + + public void setArtistId(IRI artistId) { + this.artistId = artistId; + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/service/ArtService.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/service/ArtService.java new file mode 100644 index 00000000000..6dd563489c5 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/domain/service/ArtService.java @@ -0,0 +1,49 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.domain.service; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.domain.dao.ArtistDao; +import org.eclipse.rdf4j.spring.domain.dao.PaintingDao; +import org.eclipse.rdf4j.spring.domain.model.Artist; +import org.eclipse.rdf4j.spring.domain.model.Painting; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +@Component +public class ArtService { + @Autowired + private ArtistDao artistDao; + + @Autowired + private PaintingDao paintingDao; + + @Transactional(propagation = Propagation.REQUIRED) + public Artist createArtist(String firstName, String lastName) { + Artist artist = new Artist(); + artist.setFirstName(firstName); + artist.setLastName(lastName); + return artistDao.save(artist); + } + + @Transactional(propagation = Propagation.REQUIRED) + public Painting createPainting(String title, String technique, IRI artist) { + Painting painting = new Painting(); + painting.setTitle(title); + painting.setTechnique(technique); + painting.setArtistId(artist); + return paintingDao.save(painting); + } + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/readonly/ReadonlyTests.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/readonly/ReadonlyTests.java new file mode 100644 index 00000000000..c0aa5c8aa8a --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/readonly/ReadonlyTests.java @@ -0,0 +1,107 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.readonly; + +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Optional; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.TestConfig; +import org.eclipse.rdf4j.spring.domain.dao.ArtistDao; +import org.eclipse.rdf4j.spring.domain.model.Artist; +import org.eclipse.rdf4j.spring.operationlog.OperationLogConfig; +import org.eclipse.rdf4j.spring.operationlog.log.jmx.OperationLogJmxConfig; +import org.eclipse.rdf4j.spring.pool.PoolConfig; +import org.eclipse.rdf4j.spring.repository.inmemory.InMemoryRepositoryConfig; +import org.eclipse.rdf4j.spring.repository.remote.RemoteRepositoryConfig; +import org.eclipse.rdf4j.spring.resultcache.ResultCacheConfig; +import org.eclipse.rdf4j.spring.tx.TxConfig; +import org.eclipse.rdf4j.spring.tx.exception.WriteDeniedException; +import org.eclipse.rdf4j.spring.uuidsource.noveltychecking.NoveltyCheckingUUIDSourceConfig; +import org.eclipse.rdf4j.spring.uuidsource.sequence.UUIDSequenceConfig; +import org.eclipse.rdf4j.spring.uuidsource.simple.SimpleRepositoryUUIDSourceConfig; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.ComponentScan; +import org.springframework.context.annotation.Configuration; +import org.springframework.test.context.TestPropertySource; +import org.springframework.test.context.junit.jupiter.SpringJUnitConfig; + +@SpringJUnitConfig( + classes = { + TestConfig.class, + InMemoryRepositoryConfig.class, + RemoteRepositoryConfig.class, + PoolConfig.class, + ResultCacheConfig.class, + TxConfig.class, + OperationLogConfig.class, + OperationLogJmxConfig.class, + UUIDSequenceConfig.class, + NoveltyCheckingUUIDSourceConfig.class, + SimpleRepositoryUUIDSourceConfig.class, + ReadonlyTests.Config.class + }) +@ComponentScan( + value = { + "at.researchstudio.sat.merkmalservice.readonly", + "at.researchstudio.sat.merkmalservice.service" + }) +@TestPropertySource("classpath:application.properties") +@TestPropertySource( + properties = { + "rdf4j.spring.repository.inmemory.enabled=true", + "rdf4j.spring.repository.inmemory.use-shacl-sail=true", + "rdf4j.spring.tx.enabled=true" + }) +public class ReadonlyTests { + + @Configuration + public static class Config { + @Bean + public TestHelperService getTestHelperService(@Autowired ArtistDao artistDao) { + return new TestHelperService(artistDao); + } + } + + @Autowired + TestHelperService testHelperService; + + private static IRI projectId = null; + + @Test + @Order(1) + public void testReadonlyTransactionBehaviour() { + projectId = testHelperService.createArtist(); + assertNotNull(projectId); + } + + @Test + @Order(2) + public void testReadonlyTransactionBehaviour2() { + Optional artist = testHelperService.loadProject(projectId); + assertTrue(artist.isPresent()); + } + + @Test + @Order(3) + public void test3() { + assertThrows( + WriteDeniedException.class, + () -> testHelperService.createProjectInReadonlyTransaction()); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/readonly/TestHelperService.java 
b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/readonly/TestHelperService.java new file mode 100644 index 00000000000..40220599d6f --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/readonly/TestHelperService.java @@ -0,0 +1,57 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.readonly; + +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import java.util.Optional; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.spring.domain.dao.ArtistDao; +import org.eclipse.rdf4j.spring.domain.model.Artist; +import org.springframework.stereotype.Component; +import org.springframework.transaction.annotation.Propagation; +import org.springframework.transaction.annotation.Transactional; + +@Component +public class TestHelperService { + ArtistDao artistDao; + + public TestHelperService(ArtistDao artistDao) { + this.artistDao = artistDao; + } + + @Transactional(propagation = Propagation.REQUIRES_NEW) + public IRI createArtist() { + Artist artist = new Artist(); + artist.setFirstName("Leonardo"); + artist.setLastName("Da Vinci"); + Artist created = artistDao.save(artist); + assertNotNull(created.getId()); + return created.getId(); + } + + @Transactional(propagation = Propagation.REQUIRES_NEW, readOnly = true) + public IRI createProjectInReadonlyTransaction() { + Artist artist = new Artist(); + artist.setFirstName("Leonardo"); + artist.setLastName("Da Vinci"); + Artist created = artistDao.save(artist); + assertNotNull(created.getId()); + return created.getId(); + } + + @Transactional(propagation = Propagation.REQUIRES_NEW) + public Optional loadProject(IRI id) { + return artistDao.getByIdOptional(id); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryConfigTest.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryConfigTest.java new file mode 100644 index 00000000000..a958121b4e3 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/repository/remote/RemoteRepositoryConfigTest.java @@ -0,0 +1,113 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.spring.repository.remote; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatExceptionOfType; +import static org.mockserver.model.HttpRequest.request; +import static org.mockserver.model.HttpResponse.response; + +import java.io.IOException; +import java.nio.charset.StandardCharsets; + +import org.apache.commons.io.IOUtils; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.spring.support.ConfigurationException; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockserver.client.MockServerClient; +import org.mockserver.junit.jupiter.MockServerExtension; +import org.mockserver.matchers.Times; +import org.mockserver.model.MediaType; +import org.mockserver.model.NottableString; +import org.mockserver.verify.VerificationTimes; + +@ExtendWith(MockServerExtension.class) +class RemoteRepositoryConfigTest { + + private final RemoteRepositoryConfig remoteRepositoryConfig = new RemoteRepositoryConfig(); + + @BeforeEach + void setUp(MockServerClient client) throws Exception { + client.when( + request() + .withMethod("GET") + .withPath("/repositories"), + Times.once() + ) + .respond( + response() + .withContentType(MediaType.parse("application/sparql-results+json;charset=UTF-8")) + .withBody(readFileToString("repositories.srj")) + ); + } + + @Test + void getRemoteRepository(MockServerClient client) { + // Arrange + RemoteRepositoryProperties properties = new RemoteRepositoryProperties(); + properties.setManagerUrl("http://localhost:" + client.getPort()); + properties.setName("test-repo"); + + // Act + Repository repository = remoteRepositoryConfig.getRemoteRepository(properties); + + // Assert + assertThat(repository).isNotNull(); + client.verify( + request() + .withMethod("GET") + .withPath("/repositories") + .withHeader(NottableString.not("Authorization")), + VerificationTimes.once() + ); + } + + @Test + void getRemoteRepositoryWithUsernameAndPassword(MockServerClient client) { + // Arrange + RemoteRepositoryProperties properties = new RemoteRepositoryProperties(); + properties.setManagerUrl("http://localhost:" + client.getPort()); + properties.setName("test-repo"); + properties.setUsername("admin"); + properties.setPassword("1234"); + + // Act + Repository repository = remoteRepositoryConfig.getRemoteRepository(properties); + + // Assert + assertThat(repository).isNotNull(); + client.verify( + request() + .withMethod("GET") + .withPath("/repositories") + .withHeader("Authorization", "Basic YWRtaW46MTIzNA=="), + VerificationTimes.once() + ); + } + + @Test + void getRemoteRepository_error() { + // Arrange + RemoteRepositoryProperties properties = new RemoteRepositoryProperties(); + properties.setManagerUrl("https://unknown-host:8888"); + properties.setName("test-repo"); + + // Act & Assert + assertThatExceptionOfType(ConfigurationException.class) + .isThrownBy(() -> remoteRepositoryConfig.getRemoteRepository(properties)); + } + + private String readFileToString(String fileName) throws IOException { + return IOUtils.resourceToString("__files/" + fileName, StandardCharsets.UTF_8, getClass().getClassLoader()); + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTests.java 
b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTests.java new file mode 100644 index 00000000000..99270244368 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTests.java @@ -0,0 +1,467 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import java.util.List; +import java.util.Set; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.model.vocabulary.FOAF; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.sparqlbuilder.constraint.propertypath.builder.PropertyPathBuilder; +import org.eclipse.rdf4j.sparqlbuilder.rdf.Rdf; +import org.eclipse.rdf4j.spring.RDF4JSpringTestBase; +import org.eclipse.rdf4j.spring.dao.support.opbuilder.UpdateExecutionBuilder; +import org.eclipse.rdf4j.spring.dao.support.sparql.NamedSparqlSupplier; +import org.eclipse.rdf4j.spring.domain.model.EX; +import org.eclipse.rdf4j.spring.util.QueryResultUtils; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +public class RDF4JTemplateTests extends RDF4JSpringTestBase { + + @Autowired + private RDF4JTemplate rdf4JTemplate; + + @Test + public void testUpdate1() { + UpdateExecutionBuilder updateBuilder = rdf4JTemplate.update( + String.format("INSERT { <%s> a <%s> } WHERE {} ", EX.of("Vermeer"), EX.Artist)); + updateBuilder.execute(); + Value type = rdf4JTemplate.tupleQuery( + String.format("SELECT ?type WHERE { <%s> a ?type }", + EX.of("Vermeer"))) + .evaluateAndConvert() + .toSingleton(bs -> bs.getBinding("type").getValue()); + Assertions.assertTrue(type.isIRI()); + Assertions.assertEquals(EX.Artist.toString(), type.toString()); + } + + @Test + public void testUpdate1RepeatUpdate() { + testUpdate1(); + testUpdate1(); + testUpdate1(); + testUpdate1(); + testUpdate1(); + } + + @Test + public void testUpdate3() { + UpdateExecutionBuilder updateBuilder = rdf4JTemplate.update(getClass(), "createVermeer", + () -> String.format("INSERT { <%s> a <%s> } WHERE {} ", EX.of("Vermeer"), EX.Artist)); + updateBuilder.execute(); + Value type = rdf4JTemplate.tupleQuery( + String.format("SELECT ?type WHERE { <%s> a ?type }", + EX.of("Vermeer"))) + .evaluateAndConvert() + .toSingleton(bs -> bs.getBinding("type").getValue()); + Assertions.assertTrue(type.isIRI()); + Assertions.assertEquals(EX.Artist.toString(), type.toString()); + } + + @Test + public void testUpdateFromResource() { + UpdateExecutionBuilder updateBuilder = rdf4JTemplate.updateFromResource(getClass(), + "classpath:sparql/insert-vermeer.rq"); + updateBuilder.execute(); + Value type = rdf4JTemplate.tupleQuery( + String.format("SELECT ?type WHERE { <%s> a ?type }", + EX.of("Vermeer"))) + .evaluateAndConvert() + .toSingleton(bs -> 
bs.getBinding("type").getValue()); + Assertions.assertTrue(type.isIRI()); + Assertions.assertEquals(EX.Artist.toString(), type.toString()); + } + + @Test + public void testUpdate2() { + UpdateExecutionBuilder updateBuilder = rdf4JTemplate.update(getClass(), + NamedSparqlSupplier.of("addVermeer", + () -> String.format("INSERT { <%s> a <%s> } WHERE {} ", EX.of("Vermeer"), EX.Artist))); + updateBuilder.execute(); + Value type = rdf4JTemplate.tupleQuery( + String.format("SELECT ?type " + + "WHERE { <%s> a ?type }", + EX.of("Vermeer"))) + .evaluateAndConvert() + .toSingleton(bs -> bs.getBinding("type").getValue()); + Assertions.assertTrue(type.isIRI()); + Assertions.assertEquals(EX.Artist.toString(), type.toString()); + + } + + @Test + public void testUpdateWithoutCachingStatement() { + UpdateExecutionBuilder updateBuilder = rdf4JTemplate.updateWithoutCachingStatement( + String.format("INSERT { <%s> a <%s> } " + + "WHERE {} ", EX.of("Vermeer"), EX.Artist)); + updateBuilder.execute(); + Value type = rdf4JTemplate.tupleQuery( + String.format("SELECT ?type " + + "WHERE { <%s> a ?type }", + EX.of("Vermeer"))) + .evaluateAndConvert() + .toSingleton(bs -> bs.getBinding("type").getValue()); + Assertions.assertTrue(type.isIRI()); + Assertions.assertEquals(EX.Artist.toString(), type.toString()); + } + + @Test + public void testUpdateWithBuilder() { + rdf4JTemplate.updateWithBuilder() + .subject(EX.of("Vermeer")) + .add(RDF.TYPE, EX.Artist) + .execute(); + Value type = rdf4JTemplate.tupleQuery( + String.format("SELECT ?type WHERE { <%s> a ?type }", + EX.of("Vermeer"))) + .evaluateAndConvert() + .toSingleton(bs -> bs.getBinding("type").getValue()); + Assertions.assertTrue(type.isIRI()); + Assertions.assertEquals(EX.Artist.toString(), type.toString()); + } + + @Test + public void testTupleQuery() { + Set artists = rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?artist " + + "WHERE { ?artist a ex:Artist }") + .evaluateAndConvert() + .toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + } + + @Test + public void testTupleQueryParametrized() { + Set artists = rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?artist " + + "WHERE { ?artist a ?type }") + .withBinding("type", EX.Artist) + .evaluateAndConvert() + .toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + } + + @Test + public void testTupleQueryRepeatQuery() { + testTupleQuery(); + testTupleQuery(); + testTupleQuery(); + testTupleQuery(); + testTupleQuery(); + } + + @Test + public void tupleQuery3() { + Set artists = rdf4JTemplate.tupleQuery(getClass(), "readArtists", + () -> "PREFIX ex: " + + "SELECT distinct ?artist " + + "WHERE { ?artist a ex:Artist }") + .evaluateAndConvert() + .toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + } + + @Test + public void testTupleQueryFromResource() { + Set artists = rdf4JTemplate.tupleQueryFromResource(getClass(), "classpath:sparql/get-artists.rq") + .evaluateAndConvert() + .toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + 
Assertions.assertTrue(artists.contains(EX.VanGogh)); + } + + @Test + public void testTupleQuery2() { + Set artists = rdf4JTemplate.tupleQuery(getClass(), + NamedSparqlSupplier.of("getArtists", () -> "PREFIX ex: " + + "SELECT distinct ?artist " + + "WHERE { ?artist a ex:Artist }")) + .evaluateAndConvert() + .toSet(bs -> QueryResultUtils.getIRI(bs, "artist")); + Assertions.assertEquals(2, artists.size()); + Assertions.assertTrue(artists.contains(EX.Picasso)); + Assertions.assertTrue(artists.contains(EX.VanGogh)); + } + + @Test + public void testGraphQuery() { + Model model = rdf4JTemplate.graphQuery("PREFIX ex: " + + "CONSTRUCT { ?a ?p ?o } " + + "WHERE { ?a a ex:Artist; ?p ?o }") + .evaluateAndConvert() + .toModel(); + checkArtistModel(model); + } + + @Test + public void graphQueryRepeatedly() { + for (int i = 0; i < 20; i++) { + testGraphQuery(); + } + } + + protected void checkArtistModel(Model model) { + Assertions.assertTrue( + model.contains( + EX.Picasso, + FOAF.SURNAME, + SimpleValueFactory.getInstance().createLiteral("Picasso"))); + Assertions.assertTrue( + model.contains( + EX.Picasso, + FOAF.FIRST_NAME, + SimpleValueFactory.getInstance().createLiteral("Pablo"))); + Assertions.assertTrue( + model.contains( + EX.VanGogh, + FOAF.FIRST_NAME, + SimpleValueFactory.getInstance().createLiteral("Vincent"))); + Assertions.assertTrue( + model.contains( + EX.VanGogh, + EX.creatorOf, + EX.starryNight)); + } + + @Test + public void testGraphQuery3() { + Model model = rdf4JTemplate.graphQuery( + getClass(), + "getArtistStarshapedGraphs", + () -> "PREFIX ex: " + + "CONSTRUCT { ?a ?p ?o } " + + "WHERE { ?a a ex:Artist; ?p ?o }") + .evaluateAndConvert() + .toModel(); + checkArtistModel(model); + } + + @Test + public void testGraphQueryFromResource() { + Model model = rdf4JTemplate.graphQueryFromResource(getClass(), "classpath:sparql/construct-artists.rq") + .evaluateAndConvert() + .toModel(); + checkArtistModel(model); + } + + @Test + public void testGraphQuery2() { + Model model = rdf4JTemplate.graphQuery( + getClass(), + NamedSparqlSupplier.of("getArtistStarshapedGraphs", + () -> "PREFIX ex: " + + "CONSTRUCT { ?a ?p ?o } " + + "WHERE { ?a a ex:Artist; ?p ?o }")) + .evaluateAndConvert() + .toModel(); + checkArtistModel(model); + } + + @Test + public void testDeleteTriplesWithSubject() { + rdf4JTemplate.deleteTriplesWithSubject(EX.guernica); + Assertions.assertTrue( + rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a a ex:Painting . FILTER (?a = ex:guernica) }") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + Assertions.assertFalse( + rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a ?p ?o . FILTER (?o = ex:guernica) }") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + } + + @Test + public void testDelete() { + rdf4JTemplate.delete(EX.guernica); + Assertions.assertTrue( + rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a a ex:Painting . FILTER (?a = ex:guernica) }") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + Assertions.assertTrue( + rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a ?p ?o . FILTER (?o = ex:guernica) }") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + } + + @Test + public void testDelete2() { + Assertions.assertFalse( + rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a ?b ?c . 
" + + " FILTER (?a = ex:guernica " + + " || ?c = ex:guernica) " + + "}") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + rdf4JTemplate.delete(EX.Picasso, + List.of( + PropertyPathBuilder + .of(Rdf.iri(EX.creatorOf)) + .build() + )); + Assertions.assertTrue( + rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a ?b ?c . " + + " FILTER (?a = ex:guernica " + + " || ?a = ex:Picasso" + + " || ?c = ex:guernica" + + " || ?c = ex:Picasso) " + + "}") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + Assertions.assertFalse(rdf4JTemplate.tupleQuery("PREFIX ex: " + + "SELECT distinct ?a " + + "WHERE { ?a ?b ?c . " + + " FILTER (?a = ex:starryNight " + + " || ?a = ex:VanGogh" + + " || ?c = ex:starryNight" + + " || ?c = ex:VanGogh) " + + "}") + .evaluateAndConvert() + .toList(bs -> bs.getValue("a")) + .isEmpty()); + + } + + @Test + public void testAssociate_deleteIncoming() { + IRI me = EX.of("me"); + rdf4JTemplate.updateWithBuilder() + .subject(me) + .add(RDF.TYPE, EX.Artist) + .execute(); + + // let's forge some data + rdf4JTemplate.associate( + me, + EX.creatorOf, + Set.of(EX.guernica, EX.starryNight, EX.potatoEaters), + false, true); + Assertions.assertTrue( + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", EX.Picasso) + .evaluateAndConvert() + .toList(b -> b) + .isEmpty()); + Assertions.assertEquals(1, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", EX.VanGogh) + .evaluateAndConvert() + .toList(b -> b) + .size()); + Assertions.assertEquals(3, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", me) + .evaluateAndConvert() + .toList(b -> b) + .size()); + + } + + @Test + public void testAssociate_deleteOutgoing() { + rdf4JTemplate.associate( + EX.Picasso, + EX.creatorOf, + Set.of(EX.starryNight, EX.potatoEaters), + true, false); + Assertions.assertEquals(2, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", EX.Picasso) + .evaluateAndConvert() + .toList(b -> b) + .size()); + Assertions.assertEquals(3, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", EX.VanGogh) + .evaluateAndConvert() + .toList(b -> b) + .size()); + + } + + @Test + public void testAssociate() { + IRI me = EX.of("me"); + rdf4JTemplate.updateWithBuilder() + .subject(me) + .add(RDF.TYPE, EX.Artist) + .execute(); + + // let's forge some data + rdf4JTemplate.associate( + me, + EX.creatorOf, + Set.of(EX.guernica, EX.starryNight, EX.potatoEaters), + false, false); + Assertions.assertEquals(1, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", EX.Picasso) + .evaluateAndConvert() + .toList(b -> b) + .size()); + Assertions.assertEquals(3, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", EX.VanGogh) + .evaluateAndConvert() + .toList(b -> b) + .size()); + Assertions.assertEquals(3, + rdf4JTemplate.tupleQueryFromResource(getClass(), + "classpath:sparql/get-paintings-of-artist.rq") + .withBinding("artist", me) + .evaluateAndConvert() + .toList(b -> b) + .size()); + + } +} diff --git 
a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationCache.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationCache.java new file mode 100644 index 00000000000..7957b47a535 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationCache.java @@ -0,0 +1,26 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import org.springframework.test.context.TestPropertySource; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@TestPropertySource( + properties = { + "rdf4j.spring.operationcache.enabled=true" + }) +public class RDF4JTemplateTestsWithOperationCache extends RDF4JTemplateTests { + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationLog.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationLog.java new file mode 100644 index 00000000000..83894dd8ca1 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationLog.java @@ -0,0 +1,26 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import org.springframework.test.context.TestPropertySource; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@TestPropertySource( + properties = { + "rdf4j.spring.operationlog.enabled=true" + }) +public class RDF4JTemplateTestsWithOperationLog extends RDF4JTemplateTests { + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationLogViaJMX.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationLogViaJMX.java new file mode 100644 index 00000000000..8eb9735b49b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithOperationLogViaJMX.java @@ -0,0 +1,27 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import org.springframework.test.context.TestPropertySource; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@TestPropertySource( + properties = { + "rdf4j.spring.operationlog.enabled=true", + "rdf4j.spring.operationlog.jmx.enabled=true" + }) +public class RDF4JTemplateTestsWithOperationLogViaJMX extends RDF4JTemplateTests { + +} diff --git a/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithResultCache.java b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithResultCache.java new file mode 100644 index 00000000000..e9cd5e5e47c --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/java/org/eclipse/rdf4j/spring/support/RDF4JTemplateTestsWithResultCache.java @@ -0,0 +1,26 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.spring.support; + +import org.springframework.test.context.TestPropertySource; + +/** + * @author Florian Kleedorfer + * @since 4.0.0 + */ +@TestPropertySource( + properties = { + "rdf4j.spring.resultcache.enabled=true" + }) +public class RDF4JTemplateTestsWithResultCache extends RDF4JTemplateTests { + +} diff --git a/spring6-components/rdf4j-spring6/src/test/resources/__files/repositories.srj b/spring6-components/rdf4j-spring6/src/test/resources/__files/repositories.srj new file mode 100644 index 00000000000..a8764c39235 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/__files/repositories.srj @@ -0,0 +1,39 @@ +{ + "head" : { + "vars" : [ + "uri", + "id", + "title", + "readable", + "writable" + ] + }, + "results" : { + "bindings" : [ + { + "readable" : { + "datatype" : "http://www.w3.org/2001/XMLSchema#boolean", + "type" : "literal", + "value" : "true" + }, + "id" : { + "type" : "literal", + "value" : "test-repo" + }, + "title" : { + "type" : "literal", + "value" : "" + }, + "uri" : { + "type" : "uri", + "value" : "http://localhost:7200/repositories/test-repo" + }, + "writable" : { + "datatype" : "http://www.w3.org/2001/XMLSchema#boolean", + "type" : "literal", + "value" : "true" + } + } + ] + } +} diff --git a/spring6-components/rdf4j-spring6/src/test/resources/application.properties b/spring6-components/rdf4j-spring6/src/test/resources/application.properties new file mode 100644 index 00000000000..c57684fa326 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/application.properties @@ -0,0 +1,18 @@ +# +# /******************************************************************************* +# * Copyright (c) 2021 Eclipse RDF4J contributors. +# * + * All rights reserved. This program and the accompanying materials +# * are made available under the terms of the Eclipse Distribution License v1.0 +# * which accompanies this distribution, and is available at +# * http://www.eclipse.org/org/documents/edl-v10.php. 
+# *******************************************************************************/ +# + +rdf4j.spring.pool.enabled=true +rdf4j.spring.operationlog.enabled=false +rdf4j.spring.operationlog.jmx.enabled=false +rdf4j.spring.resultcache.enabled=true +rdf4j.spring.resultcache.assume-no-other-repository-clients=true +rdf4j.spring.operationcache.enabled=true +rdf4j.spring.tx.enabled=true diff --git a/spring6-components/rdf4j-spring6/src/test/resources/data/example-data-artists-copy.ttl b/spring6-components/rdf4j-spring6/src/test/resources/data/example-data-artists-copy.ttl new file mode 100644 index 00000000000..8ee2dc9fbec --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/data/example-data-artists-copy.ttl @@ -0,0 +1,32 @@ +@prefix ex: . +@prefix foaf: . +ex:Picasso a ex:Artist ; + foaf:firstName "Pablo" ; + foaf:surname "Picasso"; + ex:creatorOf ex:guernica ; + ex:homeAddress _:node1 . + +_:node1 ex:street "31 Art Gallery" ; + ex:city "Madrid" ; + ex:country "Spain" . + +ex:guernica a ex:Painting ; + rdfs:label "Guernica"; + ex:technique "oil on canvas". + +ex:VanGogh a ex:Artist ; + foaf:firstName "Vincent" ; + foaf:surname "van Gogh"; + ex:creatorOf ex:starryNight, ex:sunflowers, ex:potatoEaters . + +ex:starryNight a ex:Painting ; + ex:technique "oil on canvas"; + rdfs:label "Starry Night" . + +ex:sunflowers a ex:Painting ; + ex:technique "oil on canvas"; + rdfs:label "Sunflowers" . + +ex:potatoEaters a ex:Painting ; + ex:technique "oil on canvas"; + rdfs:label "The Potato Eaters" . \ No newline at end of file diff --git a/spring6-components/rdf4j-spring6/src/test/resources/logback.xml b/spring6-components/rdf4j-spring6/src/test/resources/logback.xml new file mode 100644 index 00000000000..cc3c5b4b7b3 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/logback.xml @@ -0,0 +1,23 @@ + + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n + + + + + + + + diff --git a/spring6-components/rdf4j-spring6/src/test/resources/sparql/construct-artists.rq b/spring6-components/rdf4j-spring6/src/test/resources/sparql/construct-artists.rq new file mode 100644 index 00000000000..02901f29afc --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/sparql/construct-artists.rq @@ -0,0 +1,3 @@ +PREFIX ex: + +CONSTRUCT {?artist ?p ?o } WHERE { ?artist a ex:Artist; ?p ?o } \ No newline at end of file diff --git a/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-artists.rq b/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-artists.rq new file mode 100644 index 00000000000..006366ac6f3 --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-artists.rq @@ -0,0 +1,3 @@ +PREFIX ex: + +SELECT distinct ?artist WHERE { ?artist a ex:Artist } \ No newline at end of file diff --git a/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-paintings-of-artist.rq b/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-paintings-of-artist.rq new file mode 100644 index 00000000000..54cbe002e7b --- /dev/null +++ b/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-paintings-of-artist.rq @@ -0,0 +1,7 @@ +PREFIX ex: + +SELECT distinct ?painting +WHERE { ?artist + a ex:Artist; + ex:creatorOf ?painting . 
+}
\ No newline at end of file
diff --git a/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-paintings-of-artist2.rq b/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-paintings-of-artist2.rq
new file mode 100644
index 00000000000..656405a30c4
--- /dev/null
+++ b/spring6-components/rdf4j-spring6/src/test/resources/sparql/get-paintings-of-artist2.rq
@@ -0,0 +1,7 @@
+PREFIX ex: 
+
+SELECT distinct ?painting ?artist
+WHERE { ?artist
+          a ex:Artist;
+          ex:creatorOf ?painting .
+}
\ No newline at end of file
diff --git a/spring6-components/rdf4j-spring6/src/test/resources/sparql/insert-vermeer.rq b/spring6-components/rdf4j-spring6/src/test/resources/sparql/insert-vermeer.rq
new file mode 100644
index 00000000000..792930f81c1
--- /dev/null
+++ b/spring6-components/rdf4j-spring6/src/test/resources/sparql/insert-vermeer.rq
@@ -0,0 +1,3 @@
+prefix ex: 
+
+INSERT { ex:Vermeer a ex:Artist } where {}
\ No newline at end of file
diff --git a/spring6-components/spring6-boot-sparql-web/README.md b/spring6-components/spring6-boot-sparql-web/README.md
new file mode 100644
index 00000000000..990c3f56d4a
--- /dev/null
+++ b/spring6-components/spring6-boot-sparql-web/README.md
@@ -0,0 +1,55 @@
+# SPARQL-Readonly
+
+This module is meant to be used from your own Spring Boot project.
+
+Add the following dependency to your pom; otherwise set the project up like any other
+Spring Boot application:
+
+```
+<dependencies>
+    <dependency>
+        <groupId>org.eclipse.rdf4j</groupId>
+        <artifactId>rdf4j-spring6-boot-sparql-web</artifactId>
+        <version>${rdf4j.version}</version>
+    </dependency>
+</dependencies>
+```
+
+In your Spring Boot application:
+
+```java
+package org.example;
+
+import org.eclipse.rdf4j.http.server.readonly.QueryResponder;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.context.annotation.Bean;
+import org.springframework.context.annotation.ComponentScan;
+import org.springframework.context.annotation.Import;
+
+@SpringBootApplication
+@ComponentScan(basePackages = {"org.eclipse.rdf4j", "org.example"})
+@Import(QueryResponder.class)
+public class Server {
+
+    @Bean
+    public Repository getRepository() {
+        // Configure your specific store programmatically; a MemoryStore is used here as an example
+        MemoryStore store = new MemoryStore();
+        SailRepository sailRepository = new SailRepository(store);
+        sailRepository.init();
+        return sailRepository;
+    }
+
+    public static void main(String[] args) {
+        SpringApplication.run(Server.class, args);
+    }
+}
+```
+
+That is it: your Spring Boot application now exposes a single `/sparql` endpoint that provides
+read-only access to the store you have configured.
+
+This also allows the usual Docker image builds and the other features Spring Boot provides.
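For reference, the resulting endpoint speaks the standard SPARQL 1.1 protocol, so any SPARQL client can query it. The sketch below uses RDF4J's own `SPARQLRepository`, mirroring what the module's integration test does; the `Client` class name, the `localhost:8080` URL, and the example query are illustrative assumptions rather than part of the module.

```java
package org.example;

import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sparql.SPARQLRepository;

public class Client {

    public static void main(String[] args) {
        // Point a regular RDF4J SPARQLRepository at the read-only endpoint (assumed to run on port 8080)
        SPARQLRepository repository = new SPARQLRepository("http://localhost:8080/sparql");
        repository.init();
        try (RepositoryConnection connection = repository.getConnection()) {
            TupleQuery query = connection.prepareTupleQuery(QueryLanguage.SPARQL,
                    "SELECT ?s ?p ?o WHERE { ?s ?p ?o }");
            try (TupleQueryResult result = query.evaluate()) {
                while (result.hasNext()) {
                    BindingSet bindings = result.next();
                    System.out.println(bindings);
                }
            }
        } finally {
            repository.shutDown();
        }
    }
}
```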
diff --git a/spring6-components/spring6-boot-sparql-web/pom.xml b/spring6-components/spring6-boot-sparql-web/pom.xml new file mode 100644 index 00000000000..56a3e534210 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/pom.xml @@ -0,0 +1,104 @@ + + + 4.0.0 + + org.eclipse.rdf4j + rdf4j-spring6-components + 5.3.0-SNAPSHOT + + rdf4j-spring6-boot-sparql-web + RDF4J: Spring boot 3 component for a HTTP sparql server + HTTP server component implementing only the SPARQL protocol + + 5.0.0 + + + + ${project.groupId} + rdf4j-repository-api + ${project.version} + + + ${project.groupId} + rdf4j-rio-api + ${project.version} + + + ${project.groupId} + rdf4j-queryresultio-api + ${project.version} + + + ${project.groupId} + rdf4j-storage + ${project.version} + pom + + + org.eclipse.jetty + * + + + + org.eclipse.jetty.ee10 + + * + + + org.eclipse.jetty.http2 + * + + + + + jakarta.servlet + jakarta.servlet-api + + + ${project.groupId} + rdf4j-config + ${project.version} + + + org.springframework.boot + spring-boot-starter-data-rest + compile + + + + org.springframework.boot + spring-boot-starter-tomcat + + + org.ow2.asm + asm + + + + + org.springframework.boot + spring-boot-starter-test + test + + + org.springframework + spring-jcl + + + + + org.springframework.boot + spring-boot-starter-jetty + compile + + + + + + org.springframework.boot + spring-boot-maven-plugin + ${spring.boot.version} + + + + diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/EvaluateResultHttpResponse.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/EvaluateResultHttpResponse.java new file mode 100644 index 00000000000..d6e655b96a4 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/EvaluateResultHttpResponse.java @@ -0,0 +1,45 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly; + +import java.io.IOException; +import java.io.OutputStream; + +import org.eclipse.rdf4j.http.server.readonly.sparql.EvaluateResult; + +import jakarta.servlet.http.HttpServletResponse; + +/** + * Encapsulated the {@link HttpServletResponse}. 
+ */ +class EvaluateResultHttpResponse implements EvaluateResult { + + private HttpServletResponse response; + + public EvaluateResultHttpResponse(HttpServletResponse response) { + this.response = response; + } + + @Override + public void setContentType(String contentType) { + response.setContentType(contentType); + } + + @Override + public String getContentType() { + return response.getContentType(); + } + + @Override + public OutputStream getOutputstream() throws IOException { + return response.getOutputStream(); + } +} diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/QueryResponder.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/QueryResponder.java new file mode 100644 index 00000000000..35bb02299d0 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/QueryResponder.java @@ -0,0 +1,87 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly; + +import static org.springframework.http.HttpHeaders.ACCEPT; +import static org.springframework.http.MediaType.APPLICATION_FORM_URLENCODED_VALUE; + +import java.io.IOException; + +import org.eclipse.rdf4j.common.annotation.Experimental; +import org.eclipse.rdf4j.http.server.readonly.sparql.SparqlQueryEvaluator; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.repository.Repository; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestHeader; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +@Experimental +@RestController +public class QueryResponder { + + private static final String[] ALL_GRAPHS = {}; + + /** + * The repository that is being served. 
+ */ + @Autowired + private final Repository repository; + + @Autowired + private SparqlQueryEvaluator sparqlQueryEvaluator; + + public QueryResponder(Repository repository) { + this.repository = repository; + } + + @PostMapping(value = "/sparql", consumes = APPLICATION_FORM_URLENCODED_VALUE) + public void sparqlPostURLencoded( + @RequestParam(value = "default-graph-uri", required = false) String defaultGraphUri, + @RequestParam(value = "named-graph-uri", required = false) String namedGraphUri, + @RequestParam(value = "query") String query, @RequestHeader(ACCEPT) String acceptHeader, + HttpServletRequest request, HttpServletResponse response) throws IOException { + try { + EvaluateResultHttpResponse result = new EvaluateResultHttpResponse(response); + sparqlQueryEvaluator.evaluate(result, repository, query, acceptHeader, toArray(defaultGraphUri), + toArray(namedGraphUri)); + } catch (MalformedQueryException | IllegalStateException | IOException e) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST); + } + } + + @GetMapping("/sparql") + public void sparqlGet(@RequestParam(value = "default-graph-uri", required = false) String defaultGraphUri, + @RequestParam(value = "named-graph-uri", required = false) String namedGraphUri, + @RequestParam(value = "query") String query, @RequestHeader(ACCEPT) String acceptHeader, + HttpServletRequest request, HttpServletResponse response) throws IOException { + + try { + EvaluateResultHttpResponse result = new EvaluateResultHttpResponse(response); + sparqlQueryEvaluator.evaluate(result, repository, query, acceptHeader, toArray(defaultGraphUri), + toArray(namedGraphUri)); + } catch (MalformedQueryException | IllegalStateException | IOException e) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST, e.getMessage()); + } + } + + private String[] toArray(String namedGraphUri) { + if (namedGraphUri != null) { + return new String[] { namedGraphUri }; + } + return ALL_GRAPHS; + } +} diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/ReadOnlySparqlApplication.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/ReadOnlySparqlApplication.java new file mode 100644 index 00000000000..c6c1c83d2e7 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/ReadOnlySparqlApplication.java @@ -0,0 +1,23 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *******************************************************************************/
+package org.eclipse.rdf4j.http.server.readonly;
+
+import org.eclipse.rdf4j.common.annotation.Experimental;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+
+@Experimental
+@SpringBootApplication
+public class ReadOnlySparqlApplication {
+	public static void main(String[] args) {
+		SpringApplication.run(ReadOnlySparqlApplication.class, args);
+	}
+}
diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/EvaluateResult.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/EvaluateResult.java
new file mode 100644
index 00000000000..e3ac5097f59
--- /dev/null
+++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/EvaluateResult.java
@@ -0,0 +1,25 @@
+/*******************************************************************************
+ * Copyright (c) 2022 Eclipse RDF4J contributors.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *
+ * SPDX-License-Identifier: BSD-3-Clause
+ *******************************************************************************/
+package org.eclipse.rdf4j.http.server.readonly.sparql;
+
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * In/out parameter for {@link SparqlQueryEvaluator} that makes it independent of things like the Servlet API.
+ */
+public interface EvaluateResult {
+	void setContentType(String contentType);
+
+	String getContentType();
+
+	OutputStream getOutputstream() throws IOException;
+}
diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/QueryTypes.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/QueryTypes.java
new file mode 100644
index 00000000000..8fd9e831ce3
--- /dev/null
+++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/QueryTypes.java
@@ -0,0 +1,135 @@
+/*******************************************************************************
+ * Copyright (c) 2022 Eclipse RDF4J contributors.
+ *
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly.sparql; + +import java.io.IOException; +import java.util.Optional; +import java.util.function.Predicate; + +import org.eclipse.rdf4j.common.lang.FileFormat; +import org.eclipse.rdf4j.query.BooleanQuery; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultFormat; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriter; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriterFactory; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriterRegistry; +import org.eclipse.rdf4j.query.resultio.QueryResultFormat; +import org.eclipse.rdf4j.query.resultio.QueryResultIO; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultFormat; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.rio.UnsupportedRDFormatException; + +enum QueryTypes { + CONSTRUCT_OR_DESCRIBE(q -> q instanceof GraphQuery, RDFFormat.TURTLE, RDFFormat.NTRIPLES, RDFFormat.JSONLD, + RDFFormat.RDFXML) { + @Override + protected void evaluate(EvaluateResult result, Query q, String acceptHeader) + throws QueryEvaluationException, RDFHandlerException, UnsupportedRDFormatException, IOException { + GraphQuery gq = (GraphQuery) q; + RDFFormat format = (RDFFormat) bestFormat(acceptHeader); + result.setContentType(format.getDefaultMIMEType()); + gq.evaluate(Rio.createWriter(format, result.getOutputstream())); + } + }, + SELECT(q -> q instanceof TupleQuery, TupleQueryResultFormat.JSON, TupleQueryResultFormat.SPARQL, + TupleQueryResultFormat.CSV, TupleQueryResultFormat.TSV) { + @Override + protected void evaluate(EvaluateResult result, Query q, String acceptHeader) + throws QueryEvaluationException, RDFHandlerException, UnsupportedRDFormatException, IOException { + TupleQuery tq = (TupleQuery) q; + QueryResultFormat format = (QueryResultFormat) bestFormat(acceptHeader); + result.setContentType(format.getDefaultMIMEType()); + tq.evaluate(QueryResultIO.createTupleWriter(format, result.getOutputstream())); + } + }, + + ASK(q -> q instanceof BooleanQuery, BooleanQueryResultFormat.TEXT, BooleanQueryResultFormat.JSON, + BooleanQueryResultFormat.SPARQL) { + @Override + protected void evaluate(EvaluateResult result, Query q, String acceptHeader) + throws QueryEvaluationException, RDFHandlerException, UnsupportedRDFormatException, IOException { + BooleanQuery bq = (BooleanQuery) q; + QueryResultFormat format = (QueryResultFormat) bestFormat(acceptHeader); + result.setContentType(format.getDefaultMIMEType()); + final Optional optional = BooleanQueryResultWriterRegistry + .getInstance() + .get(format); + if (optional.isPresent()) { + BooleanQueryResultWriter writer = optional.get().getWriter(result.getOutputstream()); + writer.handleBoolean(bq.evaluate()); + } + } + }; + + private final FileFormat[] formats; + private final Predicate typeChecker; + + QueryTypes(Predicate typeChecker, FileFormat... formats) { + this.typeChecker = typeChecker; + this.formats = formats; + } + + /** + * Test if the query is of a type that can be answered. And that the accept headers allow for the response to be + * send. 
+ * + * @param preparedQuery + * @param acceptHeader + * @return true if the query is of the right type and acceptHeaders are acceptable. + * @throws IllegalStateException if no acceptHeader is present + */ + public boolean accepts(Query preparedQuery, String acceptHeader) throws IllegalStateException { + if (accepts(preparedQuery)) { + if (acceptHeader == null || acceptHeader.isEmpty()) { + return true; + } else { + for (FileFormat format : formats) { + for (String mimeType : format.getMIMETypes()) { + if (acceptHeader.contains(mimeType)) { + return true; + } + } + } + } + throw new IllegalStateException("acceptHeader is mandatory."); + } + return false; + } + + protected abstract void evaluate(EvaluateResult result, Query q, String acceptHeader) + throws QueryEvaluationException, RDFHandlerException, UnsupportedRDFormatException, IOException; + + protected boolean accepts(Query q) { + return typeChecker.test(q); + } + + ; + + protected FileFormat bestFormat(String acceptHeader) { + if (acceptHeader == null || acceptHeader.isEmpty()) { + return formats[0]; + } else { + for (FileFormat format : formats) { + for (String mimeType : format.getMIMETypes()) { + if (acceptHeader.contains(mimeType)) { + return format; + } + } + } + } + return formats[0]; + } +} diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluator.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluator.java new file mode 100644 index 00000000000..73a03f1f862 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluator.java @@ -0,0 +1,37 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly.sparql; + +import java.io.IOException; + +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.repository.Repository; + +public interface SparqlQueryEvaluator { + /** + * Evaluates/Execute the passed query against the passed repository usimg the passed arguments. + * + * @param result in/out parameter for returning the contentType and the result stream. + * @param repository the repository against which the query is to be executed + * @param query The query to be evaluated + * @param acceptHeader needed to find the best response format. + * @param defaultGraphUri The graphs that are embedded in the query with FROM {@link Dataset#getDefaultGraphs()} + * @param namedGraphUris The graphs that are embedded in the query with FROM NAMED see + * {@link Dataset#getNamedGraphs()} + * @throws MalformedQueryException If the supplied query is malformed. 
+ * @throws IOException if there is a problem with the {@link EvaluateResult#getOutputstream()} + * @throws IllegalStateException if no acceptHeader is present + */ + void evaluate(EvaluateResult result, Repository repository, String query, String acceptHeader, + String[] defaultGraphUri, + String[] namedGraphUris) throws MalformedQueryException, IllegalStateException, IOException; +} diff --git a/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluatorDefault.java b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluatorDefault.java new file mode 100644 index 00000000000..7e5c74c3593 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/main/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluatorDefault.java @@ -0,0 +1,68 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly.sparql; + +import java.io.IOException; +import java.util.Arrays; + +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.impl.SimpleDataset; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.springframework.stereotype.Component; + +@Component +class SparqlQueryEvaluatorDefault implements SparqlQueryEvaluator { + + @Override + public void evaluate(EvaluateResult result, Repository repository, String query, + String acceptHeader, String[] defaultGraphUri, String[] namedGraphUris) + throws MalformedQueryException, IOException, IllegalStateException { + try (RepositoryConnection connection = repository.getConnection()) { + Query preparedQuery = connection.prepareQuery(QueryLanguage.SPARQL, query); + preparedQuery.setDataset(getQueryDataSet(defaultGraphUri, namedGraphUris, connection)); + for (QueryTypes qt : QueryTypes.values()) { + if (qt.accepts(preparedQuery, acceptHeader)) { + qt.evaluate(result, preparedQuery, acceptHeader); + } + } + } + } + + /** + * @param defaultGraphUri + * @param namedGraphUris + * @param connection + * @see protocol dataset + */ + private Dataset getQueryDataSet(String[] defaultGraphUri, String[] namedGraphUris, + RepositoryConnection connection) { + SimpleDataset dataset = new SimpleDataset(); + + ValueFactory valueFactory = connection.getValueFactory(); + if (defaultGraphUri != null) { + Arrays.stream(defaultGraphUri) + .map(valueFactory::createIRI) + .forEach(dataset::addDefaultGraph); + } + + if (namedGraphUris != null) { + Arrays.stream(namedGraphUris) + .map(valueFactory::createIRI) + .forEach(dataset::addNamedGraph); + } + return dataset; + } +} diff --git a/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/MemoryBackedOnlySparqlApplicationTest.java 
b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/MemoryBackedOnlySparqlApplicationTest.java new file mode 100644 index 00000000000..88d2cd444d7 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/MemoryBackedOnlySparqlApplicationTest.java @@ -0,0 +1,80 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import java.io.IOException; + +import org.eclipse.rdf4j.http.client.SPARQLProtocolSession; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.QueryInterruptedException; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.junit.jupiter.api.Test; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.boot.test.context.SpringBootTest.WebEnvironment; +import org.springframework.boot.test.web.client.TestRestTemplate; +import org.springframework.boot.test.web.server.LocalServerPort; +import org.springframework.context.annotation.Import; + +@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT) +@Import(MemoryBackedOnlySparqlApplicationTestConfig.class) +public class MemoryBackedOnlySparqlApplicationTest { + @LocalServerPort + private int port; + + @Autowired + private QueryResponder queryResponder; + @Autowired + private TestRestTemplate restTemplate; + + @Test + public void contextLoads() { + assertThat(queryResponder).isNotNull(); + } + + @Test + public void testAskQuery() { + assertThat(this.restTemplate.getForObject("http://localhost:" + port + "/sparql?query={query}", + String.class, "ASK { ?s ?p ?o }")).contains("true"); + + } + + @Test + public void testSelectQuery() { + String forObject = this.restTemplate.getForObject("http://localhost:" + port + "/sparql?query={query}", + String.class, "SELECT * WHERE { ?s ?p ?o }"); + assertThat(forObject).contains("http://www.w3.org/1999/02/22-rdf-syntax-ns#Bag"); + } + + @Test + public void testSPARQLRepository() throws QueryInterruptedException, RepositoryException, + MalformedQueryException, IOException { + String query = "SELECT * WHERE { ?s ?p ?o }"; + TestSPARQLRepository rep = new TestSPARQLRepository("http://localhost:" + port + "/sparql"); + try ( + SPARQLProtocolSession session = rep.createSPARQLProtocolSession(); + TupleQueryResult sendTupleQuery = session.sendTupleQuery(QueryLanguage.SPARQL, query, null, false, + null)) { + + while (sendTupleQuery.hasNext()) { + assertNotNull(sendTupleQuery.next()); + } + } finally { + rep.shutDown(); + } + } + +} diff --git a/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/MemoryBackedOnlySparqlApplicationTestConfig.java 
b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/MemoryBackedOnlySparqlApplicationTestConfig.java new file mode 100644 index 00000000000..60f022e89f6 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/MemoryBackedOnlySparqlApplicationTestConfig.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.readonly; + +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection; +import org.eclipse.rdf4j.sail.memory.MemoryStore; +import org.springframework.boot.test.context.TestConfiguration; +import org.springframework.context.annotation.Bean; + +@TestConfiguration +public class MemoryBackedOnlySparqlApplicationTestConfig { + + @Bean(destroyMethod = "shutDown") + public Repository getTestRepository() { + SailRepository sailRepository = new SailRepository(new MemoryStore()); + sailRepository.init(); + try (SailRepositoryConnection connection = sailRepository.getConnection()) { + connection.add(sailRepository.getValueFactory().createStatement(RDF.ALT, RDF.BAG, RDF.FIRST)); + } + + return sailRepository; + } +} diff --git a/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/TestSPARQLRepository.java b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/TestSPARQLRepository.java new file mode 100644 index 00000000000..8baa2e5166f --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/TestSPARQLRepository.java @@ -0,0 +1,31 @@ +/******************************************************************************* + * Copyright (c) 2021 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly; + +import org.eclipse.rdf4j.http.client.SPARQLProtocolSession; +import org.eclipse.rdf4j.repository.sparql.SPARQLRepository; + +public class TestSPARQLRepository extends SPARQLRepository { + + public TestSPARQLRepository(String endpointUrl) { + super(endpointUrl); + } + + public TestSPARQLRepository(String queryEndpointUrl, String updateEndpointUrl) { + super(queryEndpointUrl, updateEndpointUrl); + } + + @Override + public SPARQLProtocolSession createSPARQLProtocolSession() { + return super.createSPARQLProtocolSession(); + } + +} diff --git a/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/sparql/EvaluateResultDefault.java b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/sparql/EvaluateResultDefault.java new file mode 100644 index 00000000000..6ecad6e19e1 --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/sparql/EvaluateResultDefault.java @@ -0,0 +1,42 @@ +/******************************************************************************* + * Copyright (c) 2023 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly.sparql; + +import java.io.OutputStream; + +public class EvaluateResultDefault implements EvaluateResult { + private String contentType; + + private OutputStream outputstream; + + public EvaluateResultDefault(OutputStream outputstream) { + this.outputstream = outputstream; + } + + @Override + public String getContentType() { + return contentType; + } + + @Override + public void setContentType(String contentType) { + this.contentType = contentType; + } + + @Override + public OutputStream getOutputstream() { + return outputstream; + } + + public void setOutputstream(OutputStream outputstream) { + this.outputstream = outputstream; + } +} diff --git a/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluatorDefaultTest.java b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluatorDefaultTest.java new file mode 100644 index 00000000000..5ef6901a70f --- /dev/null +++ b/spring6-components/spring6-boot-sparql-web/src/test/java/org/eclipse/rdf4j/http/server/readonly/sparql/SparqlQueryEvaluatorDefaultTest.java @@ -0,0 +1,175 @@ +/******************************************************************************* + * Copyright (c) 2023 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.readonly.sparql; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Literal; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.util.ModelBuilder; +import org.eclipse.rdf4j.model.util.Values; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.memory.MemoryStore; +import org.junit.jupiter.api.Test; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonMappingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ArrayNode; + +public class SparqlQueryEvaluatorDefaultTest { + + private static final IRI CTX1 = Values.iri("http://example.com/ctx1"); + + private static final IRI CTX2 = Values.iri("http://example.com/ctx2"); + + private static final IRI TYP1 = Values.iri("http://example.com/typ1"); + + private static final IRI TYP2 = Values.iri("http://example.com/typ2"); + + @Test + public void queryWithoutContext() throws MalformedQueryException, IllegalStateException, IOException { + Repository repo = new SailRepository(new MemoryStore()); + try (RepositoryConnection con = repo.getConnection()) { + con.add(getTestModel1(), CTX1); + con.add(getTestModel2(), CTX2); + con.getStatements(null, null, null).forEach(System.out::println); + } + + EvaluateResult evaluateResult = new EvaluateResultDefault(new ByteArrayOutputStream()); + SparqlQueryEvaluator sparqlQueryEvaluator = new SparqlQueryEvaluatorDefault(); + + String queryString = "SELECT ?subject ?predicate ?object WHERE { ?subject ?predicate ?object} "; + String[] namedGraphUris = null;// new String[] {}; + + String[] defaultGraphUri = null;// new String[] {}; + + sparqlQueryEvaluator.evaluate(evaluateResult, repo, queryString, null, + defaultGraphUri, namedGraphUris); + + ArrayNode bindingArray = asArrayNode(evaluateResult); + assertEquals(4, bindingArray.size()); + } + + @Test + public void queryWithDefaultGraphUri() throws MalformedQueryException, IllegalStateException, IOException { + Repository repo = new SailRepository(new MemoryStore()); + try (RepositoryConnection con = repo.getConnection()) { + con.add(getTestModel1(), CTX1); + con.add(getTestModel2(), CTX1); + con.getStatements(null, null, null).forEach(System.out::println); + } + + EvaluateResult evaluateResult = new EvaluateResultDefault(new ByteArrayOutputStream()); + SparqlQueryEvaluator sparqlQueryEvaluator = new SparqlQueryEvaluatorDefault(); + + String queryString = "SELECT ?subject ?predicate ?object WHERE { ?subject ?predicate ?object} "; + String[] namedGraphUris = new String[] {}; + String[] defaultGraphUri = new String[] { CTX1.stringValue() }; + + sparqlQueryEvaluator.evaluate(evaluateResult, repo, queryString, null, + defaultGraphUri, namedGraphUris); + + ArrayNode bindingArray = asArrayNode(evaluateResult); + assertEquals(4, bindingArray.size()); + } + + @Test + public void queryWithDefaultGraphUriAndNamedGraphUris() + throws MalformedQueryException, IllegalStateException, IOException 
{ + Repository repo = new SailRepository(new MemoryStore()); + try (RepositoryConnection con = repo.getConnection()) { + con.add(getTestModel1(), CTX1); + con.add(getTestModel2(), CTX2); + con.getStatements(null, null, null).forEach(System.out::println); + } + + EvaluateResult evaluateResult = new EvaluateResultDefault(new ByteArrayOutputStream()); + SparqlQueryEvaluator sparqlQueryEvaluator = new SparqlQueryEvaluatorDefault(); + + String queryString = "SELECT ?subject ?predicate ?object WHERE { ?subject ?predicate ?object} "; + String[] namedGraphUris = new String[] { CTX2.stringValue() }; + String[] defaultGraphUri = new String[] { CTX1.stringValue() }; + + sparqlQueryEvaluator.evaluate(evaluateResult, repo, queryString, null, + defaultGraphUri, namedGraphUris); + + ArrayNode bindingArray = asArrayNode(evaluateResult); + assertEquals(2, bindingArray.size()); + } + + @Test + public void queryWithNamedGraphUris() throws MalformedQueryException, IllegalStateException, IOException { + Repository repo = new SailRepository(new MemoryStore()); + try (RepositoryConnection con = repo.getConnection()) { + con.add(getTestModel1(), CTX1); + con.add(getTestModel2(), CTX2); + con.getStatements(null, null, null).forEach(System.out::println); + } + + EvaluateResult evaluateResult = new EvaluateResultDefault(new ByteArrayOutputStream()); + SparqlQueryEvaluator sparqlQueryEvaluator = new SparqlQueryEvaluatorDefault(); + + String queryString = "SELECT ?subject ?predicate ?object WHERE { ?subject ?predicate ?object} "; + String[] namedGraphUris = new String[] {}; + String[] defaultGraphUri = new String[] { CTX1.stringValue(), CTX2.stringValue() }; + + sparqlQueryEvaluator.evaluate(evaluateResult, repo, queryString, null, + defaultGraphUri, namedGraphUris); + + ArrayNode bindingArray = asArrayNode(evaluateResult); + assertEquals(4, bindingArray.size()); + } + + private Model getTestModel1() { + Literal obj1_1 = Values.literal("testValue_user1_obj1_1"); + Literal obj1_2 = Values.literal("testValue_user1_obj1_2"); + IRI obj1 = Values.iri("http://example.com/user1/object1"); + Model model = new ModelBuilder() + .subject(obj1) + .add(TYP1, obj1_1) + .add(TYP2, obj1_2) + .build(); + return model; + } + + private Model getTestModel2() { + Literal obj2_1 = Values.literal("testValue_user1_obj2_1"); + Literal obj2_2 = Values.literal("testValue_user1_obj2_2"); + IRI obj2 = Values.iri("http://example.com/user1/object2"); + + Model model = new ModelBuilder() + .subject(obj2) + .add(TYP1, obj2_1) + .add(TYP2, obj2_2) + .build(); + return model; + } + + private ArrayNode asArrayNode(EvaluateResult evaluateResult) + throws IOException, JsonProcessingException, JsonMappingException { + ByteArrayOutputStream stream = (ByteArrayOutputStream) evaluateResult.getOutputstream(); + String evaluateResultString = new String(stream.toByteArray()); + System.out.println(evaluateResultString); + ObjectMapper mapper = new ObjectMapper(); + JsonNode root = mapper.readTree(evaluateResultString); + ArrayNode bindingArray = (ArrayNode) root.get("results").get("bindings"); + return bindingArray; + } +} diff --git a/tools/config/pom.xml b/tools/config/pom.xml index 7f7c84d5df9..a58216f0997 100644 --- a/tools/config/pom.xml +++ b/tools/config/pom.xml @@ -19,6 +19,10 @@ ch.qos.logback logback-classic + + ch.qos.logback + logback-core + org.junit.jupiter junit-jupiter-engine diff --git a/tools/pom.xml b/tools/pom.xml index e4ba7cc3e48..bfa7b569246 100644 --- a/tools/pom.xml +++ b/tools/pom.xml @@ -16,6 +16,7 @@ federation server server-spring + 
server-spring6 server-boot workbench runtime diff --git a/tools/server-spring/pom.xml b/tools/server-spring/pom.xml index be9f901afa0..c67f095c92e 100644 --- a/tools/server-spring/pom.xml +++ b/tools/server-spring/pom.xml @@ -7,7 +7,7 @@ 5.3.0-SNAPSHOT rdf4j-http-server-spring - RDF4J: HTTP server - core + RDF4J: HTTP server - core (Spring 5) HTTP server implementing a REST-style protocol diff --git a/tools/server-spring6/pom.xml b/tools/server-spring6/pom.xml new file mode 100644 index 00000000000..1b4e9235c4f --- /dev/null +++ b/tools/server-spring6/pom.xml @@ -0,0 +1,78 @@ + + + 4.0.0 + + org.eclipse.rdf4j + rdf4j-tools + 5.3.0-SNAPSHOT + + rdf4j-http-server-spring6 + RDF4J: HTTP server - core (Spring 6) + HTTP server implementing a REST-style protocol + + + 17 + + 6.2.12 + 2.0.17 + 1.5.20 + 2.24.3 + + + + ${project.groupId} + rdf4j-client + ${project.version} + pom + + + ${project.groupId} + rdf4j-storage + ${project.version} + pom + + + ${project.groupId} + rdf4j-config + ${project.version} + + + ${project.groupId} + rdf4j-rio-rdfjson + ${project.version} + + + jakarta.servlet + jakarta.servlet-api + 6.0.0 + provided + + + org.springframework + spring-aop + runtime + + + org.springframework + spring-webmvc + + + org.slf4j + slf4j-api + + + com.google.guava + guava + + + org.mockito + mockito-core + test + + + org.springframework + spring-test + test + + + diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/CommonValuesHandlerInterceptor.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/CommonValuesHandlerInterceptor.java new file mode 100644 index 00000000000..290591123ae --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/CommonValuesHandlerInterceptor.java @@ -0,0 +1,50 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp; + +import java.util.Calendar; + +import org.springframework.web.servlet.HandlerInterceptor; +import org.springframework.web.servlet.ModelAndView; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Interceptor that inserts some commonly used values into the model. The inserted values are: - path, equal to + * request.getContextPath() (e.g. /context) - basePath, equal to the fully qualified context path (e.g. 
+ * http://www.example.com/context/) - currentYear, equal to the current year + * + * @author Herko ter Horst + */ +public class CommonValuesHandlerInterceptor implements HandlerInterceptor { + + @Override + public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, + Exception ex) { + // nop + } + + @Override + public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView mav) { + mav.addObject("path", request.getContextPath()); + mav.addObject("basePath", request.getScheme() + "://" + request.getServerName() + ":" + request.getServerPort() + + request.getContextPath() + "/"); + mav.addObject("currentYear", Calendar.getInstance().get(Calendar.YEAR)); + } + + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) + throws Exception { + return true; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/Message.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/Message.java new file mode 100644 index 00000000000..97ce9dc7923 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/Message.java @@ -0,0 +1,49 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp; + +/** + * @author Herko ter Horst + */ +public class Message { + + public static final String ATTRIBUTE_KEY = "message"; + + public enum Type { + ERROR, + WARN, + INFO + } + + private final Type type; + + private final String i18n; + + public Message(Type type, String i18n) { + this.type = type; + this.i18n = i18n; + } + + /** + * @return Returns the type. + */ + public Type getType() { + return type; + } + + /** + * @return Returns the i18n. + */ + public String getI18n() { + return i18n; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/MessageHandlerInterceptor.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/MessageHandlerInterceptor.java new file mode 100644 index 00000000000..ed23838cf7e --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/MessageHandlerInterceptor.java @@ -0,0 +1,66 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp; + +import org.springframework.web.servlet.HandlerInterceptor; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.view.RedirectView; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.servlet.http.HttpSession; + +/** + * Interceptor that inserts some commonly used values into the model. The inserted values are: - path, equal to + * request.getContextPath() (e.g. /context) - basePath, equal to the fully qualified context path (e.g. + * http://www.example.com/context/) - currentYear, equal to the current year + * + * @author Herko ter Horst + */ +public class MessageHandlerInterceptor implements HandlerInterceptor { + + @Override + public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, + Exception ex) { + // nop + } + + @Override + public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView mav) { + HttpSession session = request.getSession(); + + if (session != null) { + Message message = (Message) session.getAttribute(Message.ATTRIBUTE_KEY); + if (message != null && !mav.getModelMap().containsKey(Message.ATTRIBUTE_KEY)) { + mav.addObject(Message.ATTRIBUTE_KEY, message); + } + + boolean shouldRemove = true; + if (mav.hasView() && mav.getView() instanceof RedirectView) { + shouldRemove = false; + } + if (mav.getViewName() != null && mav.getViewName().startsWith("redirect:")) { + shouldRemove = false; + } + + if (shouldRemove) { + session.removeAttribute(Message.ATTRIBUTE_KEY); + } + } + } + + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) + throws Exception { + return true; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/filters/PathFilter.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/filters/PathFilter.java new file mode 100644 index 00000000000..9113c5cf553 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/filters/PathFilter.java @@ -0,0 +1,80 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.filters; + +import java.io.CharArrayWriter; +import java.io.IOException; +import java.io.PrintWriter; + +import jakarta.servlet.Filter; +import jakarta.servlet.FilterChain; +import jakarta.servlet.FilterConfig; +import jakarta.servlet.ServletException; +import jakarta.servlet.ServletRequest; +import jakarta.servlet.ServletResponse; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.servlet.http.HttpServletResponseWrapper; + +/** + * @author Herko ter Horst + */ +public class PathFilter implements Filter { + + @Override + public void init(FilterConfig filterConf) throws ServletException { + // do nothing + } + + @Override + public void destroy() { + // do nothing + } + + @Override + public void doFilter(ServletRequest req, ServletResponse res, FilterChain filterChain) + throws IOException, ServletException { + if (req instanceof HttpServletRequest) { + HttpServletRequest request = (HttpServletRequest) req; + HttpServletResponse response = (HttpServletResponse) res; + String path = request.getContextPath(); + + PrintWriter out = response.getWriter(); + CharResponseWrapper wrapper = new CharResponseWrapper((HttpServletResponse) response); + filterChain.doFilter(request, wrapper); + CharArrayWriter caw = new CharArrayWriter(); + caw.write(wrapper.toString().replace("${path}", path)); + String result = caw.toString(); + response.setContentLength(result.length()); + out.write(result); + } + } + + private static class CharResponseWrapper extends HttpServletResponseWrapper { + + private final CharArrayWriter output; + + @Override + public String toString() { + return output.toString(); + } + + public CharResponseWrapper(HttpServletResponse response) { + super(response); + output = new CharArrayWriter(); + } + + @Override + public PrintWriter getWriter() { + return new PrintWriter(output); + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/AbstractNavigationNode.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/AbstractNavigationNode.java new file mode 100644 index 00000000000..d0478a48575 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/AbstractNavigationNode.java @@ -0,0 +1,296 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +/** + * Base implementation of the NavigationNode interface. 
+ * + * @author Herko ter Horst + */ +public abstract class AbstractNavigationNode implements NavigationNode { + + private String id; + + private boolean hidden; + + private boolean enabled; + + private NavigationNode parent; + + protected String path; + + protected String icon; + + protected String i18n; + + protected String viewSuffix; + + public AbstractNavigationNode(String id) { + setId(id); + setEnabled(true); + } + + @Override + public String getId() { + return id; + } + + void setId(String id) { + this.id = id; + } + + @Override + public boolean isHidden() { + return hidden; + } + + @Override + public void setHidden(boolean hidden) { + this.hidden = hidden; + } + + @Override + public boolean isEnabled() { + return enabled; + } + + @Override + public void setEnabled(boolean enabled) { + this.enabled = enabled; + } + + @Override + public NavigationNode getParent() { + return parent; + } + + @Override + public void setParent(NavigationNode parent) { + this.parent = parent; + } + + @Override + public boolean isParent(NavigationNode node) { + boolean result = false; + + if (node != null && node != this) { + if (node.getParent() == this) { + result = true; + } else if (node.getParent() != null) { + result = isParent(node.getParent()); + } + } + + return result; + } + + @Override + public String getPathPrefix() { + StringBuilder result = new StringBuilder(); + if (getParent() != null) { + if (getParent().getPathPrefix() != null) { + result.append(getParent().getPathPrefix()); + } + if (!getParent().getId().isEmpty()) { + result.append(getParent().getId()); + result.append(getPathSeparator()); + } + } + return result.toString(); + } + + @Override + public String getPathSeparator() { + String result = null; + if (getParent() != null) { + result = getParent().getPathSeparator(); + } + return result; + } + + @Override + public String getPath() { + if (path == null) { + StringBuilder result = new StringBuilder(); + result.append(getPathPrefix()); + result.append(getId()); + setPath(result.toString()); + } + return path; + } + + @Override + public void setPath(String path) { + this.path = path; + } + + @Override + public String getIconPrefix() { + StringBuilder result = new StringBuilder(); + if (getParent() != null) { + if (getParent().getIconPrefix() != null) { + result.append(getParent().getIconPrefix()); + } + if (!getParent().getId().isEmpty()) { + result.append(getParent().getId()); + result.append(getIconSeparator()); + } + } + return result.toString(); + } + + @Override + public String getIconSeparator() { + String result = null; + if (getParent() != null) { + result = getParent().getIconSeparator(); + } + return result; + } + + @Override + public String getIconSuffix() { + String result = null; + if (getParent() != null) { + result = getParent().getIconSuffix(); + } + return result; + } + + @Override + public String getIcon() { + if (icon == null) { + StringBuilder result = new StringBuilder(); + result.append(getIconPrefix()); + result.append(getId()); + result.append(getIconSuffix()); + setIcon(result.toString()); + } + + return icon; + } + + @Override + public void setIcon(String icon) { + this.icon = icon; + } + + @Override + public String getI18nPrefix() { + StringBuilder result = new StringBuilder(); + if (getParent() != null) { + if (getParent().getI18nPrefix() != null) { + result.append(getParent().getI18nPrefix()); + } + if (!getParent().getId().isEmpty()) { + result.append(getParent().getId()); + result.append(getI18nSeparator()); + } + } + return result.toString(); + } + + 
@Override + public String getI18nSeparator() { + String result = null; + if (getParent() != null) { + result = getParent().getI18nSeparator(); + } + return result; + } + + @Override + public String getI18nSuffix() { + String result = null; + if (getParent() != null) { + result = getParent().getI18nSuffix(); + } + return result; + } + + @Override + public String getI18n() { + if (i18n == null) { + StringBuilder result = new StringBuilder(); + result.append(getI18nPrefix()); + result.append(getId()); + result.append(getI18nSuffix()); + setI18n(result.toString()); + } + return i18n; + } + + @Override + public void setI18n(String i18n) { + this.i18n = i18n; + } + + @Override + public String getViewSuffix() { + if (viewSuffix == null) { + if (getParent() != null) { + setViewSuffix(getParent().getViewSuffix()); + } + } + return viewSuffix; + } + + @Override + public void setViewSuffix(String viewSuffix) { + this.viewSuffix = viewSuffix; + } + + @Override + public int getDepth() { + int result = 0; + + if (getParent() != null) { + result = getParent().getDepth() + 1; + } + + return result; + } + + @Override + public boolean equals(Object other) { + boolean result = this == other; + if (!result && other instanceof NavigationNode && getClass().equals(other.getClass())) { + NavigationNode otherNode = (NavigationNode) other; + result = getId().equals(otherNode.getId()); + if (result && !(getParent() == null && otherNode.getParent() == null)) { + if (getParent() != null && otherNode.getParent() != null) { + result = getParent().equals(otherNode.getParent()); + } else { + result = false; + } + } + } + return result; + } + + @Override + public int hashCode() { + int result = getId().hashCode(); + if (getParent() != null) { + result += 31 * getParent().hashCode(); + } + return result; + } + + protected void copyCommonAttributes(NavigationNode node) { + node.setEnabled(isEnabled()); + node.setHidden(isHidden()); + node.setI18n(getI18n()); + node.setIcon(getIcon()); + node.setPath(getPath()); + node.setViewSuffix(getViewSuffix()); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/Group.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/Group.java new file mode 100644 index 00000000000..e42c537f968 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/Group.java @@ -0,0 +1,139 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; + +/** + * Group represents a logical group of views in a NavigationModel. + * + * @author Herko ter Horst + */ +public class Group extends AbstractNavigationNode { + + protected Map groups; + + protected Map views; + + protected Map viewNames; + + /** + * Construct a new group with the specified ID. 
+ * + * @param id the ID of the group + */ + public Group(String id) { + super(id); + groups = new LinkedHashMap<>(); + views = new LinkedHashMap<>(); + viewNames = new LinkedHashMap<>(); + } + + /** + * Add a group to this group. The group becomes a sub-group of this group. + * + * @param group the group to add + */ + public void addGroup(Group group) { + group.setParent(this); + groups.put(group.getId(), group); + } + + /** + * Get the sub-group with the specified ID. + * + * @param id the ID of the sub-group + * @return the sub-group with the specified ID, or null if this group doesn't contain a sub-group with that ID + */ + public Group getGroup(String id) { + return groups.get(id); + } + + /** + * Get the list of sub-groups + * + * @return the list of sub-groups + */ + public List getGroups() { + return new ArrayList<>(groups.values()); + } + + /** + * Add a view to this group. + * + * @param view the view to add + */ + public void addView(View view) { + view.setParent(this); + views.put(view.getId(), view); + viewNames.put(view.getId() + view.getViewSuffix(), view); + } + + public View getView(String viewId) { + return views.get(viewId); + } + + /** + * Get the view with the specified name. + * + * @param viewName the name of the view (ID+suffix) + * @return the view with the specified name, or null if this group doesn't contain a view with that name + */ + public View getViewByName(String viewName) { + return viewNames.get(viewName); + } + + protected View findViewInternal(String viewName) { + View result; + + int indexOfSeparator = viewName.indexOf(getPathSeparator()); + if (indexOfSeparator > 0) { + String groupId = viewName.substring(0, indexOfSeparator); + Group subGroup = getGroup(groupId); + result = subGroup.findViewInternal(viewName.substring(indexOfSeparator + 1)); + } else { + result = getViewByName(viewName); + } + + return result; + } + + /** + * Get the list of views. + * + * @return the list of views + */ + public List getViews() { + return new ArrayList<>(views.values()); + } + + @Override + public Object clone() { + Group result = new Group(getId()); + copyCommonAttributes(result); + copyGroupsAndViews(result); + return result; + } + + protected void copyGroupsAndViews(Group group) { + for (Group subGroup : getGroups()) { + Group clonedGroup = (Group) subGroup.clone(); + group.addGroup(clonedGroup); + } + for (View view : getViews()) { + View clonedView = (View) view.clone(); + group.addView(clonedView); + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationHandlerInterceptor.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationHandlerInterceptor.java new file mode 100644 index 00000000000..052dcfaafaa --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationHandlerInterceptor.java @@ -0,0 +1,67 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +import org.springframework.web.servlet.HandlerInterceptor; +import org.springframework.web.servlet.ModelAndView; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; +import jakarta.servlet.http.HttpSession; + +/** + * Interceptor that inserts the navigation model for the current Spring view into the model. + * + * @author Herko ter Horst + */ +public class NavigationHandlerInterceptor implements HandlerInterceptor { + + private NavigationModel navigationModel; + + public NavigationModel getNavigationModel() { + return navigationModel; + } + + public void setNavigationModel(NavigationModel navigationModel) { + this.navigationModel = navigationModel; + } + + @Override + public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, + Exception ex) { + // nop + } + + @Override + public void postHandle(HttpServletRequest request, HttpServletResponse response, Object handler, ModelAndView mav) { + NavigationModel sessionNavigationModel = (NavigationModel) request.getSession() + .getAttribute(NavigationModel.NAVIGATION_MODEL_KEY); + if (sessionNavigationModel == null) { + sessionNavigationModel = navigationModel; + } + + if (mav != null && sessionNavigationModel != null) { + mav.addObject("view", sessionNavigationModel + .findView(request.getRequestURI().substring(request.getContextPath().length()))); + } + } + + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) { + HttpSession session = request.getSession(true); + if (session.getAttribute(NavigationModel.NAVIGATION_MODEL_KEY) == null) { + session.setAttribute(NavigationModel.NAVIGATION_MODEL_KEY, getNavigationModel().clone()); + } + + return true; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationModel.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationModel.java new file mode 100644 index 00000000000..21939ef9ca4 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationModel.java @@ -0,0 +1,251 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +import java.util.ArrayList; +import java.util.List; + +import org.eclipse.rdf4j.common.io.ResourceUtil; + +/** + * NavigationModel represents the navigation structure of a web application. A model consists of groups and views. 
+ * + * @author Herko ter Horst + */ +public class NavigationModel extends Group { + + public static final String NAVIGATION_MODEL_KEY = "navigation-model"; + + public static final String DEFAULT_PATH_PREFIX = "/"; + + public static final String DEFAULT_PATH_SEPARATOR = "/"; + + public static final String DEFAULT_VIEW_SUFFIX = ".view"; + + public static final String DEFAULT_ICON_PREFIX = "/images/icons/"; + + public static final String DEFAULT_ICON_SEPARATOR = "_"; + + public static final String DEFAULT_ICON_SUFFIX = ".png"; + + public static final String DEFAULT_I18N_PREFIX = ""; + + public static final String DEFAULT_I18N_SEPARATOR = "."; + + public static final String DEFAULT_I18N_SUFFIX = ".title"; + + private List navigationModelLocations = new ArrayList<>(); + + private String pathPrefix; + + private String pathSeparator; + + private String iconPrefix; + + private String iconSeparator; + + private String iconSuffix; + + private String i18nPrefix; + + private String i18nSeparator; + + private String i18nSuffix; + + /** + * Construct a new, anonymous, empty NavigationModel + */ + public NavigationModel() { + super(null); + } + + /** + * Construct a new emtpy NavigationModel with the specified ID. + * + * @param id the ID of the NavigationModel + */ + public NavigationModel(String id) { + super(id); + } + + @Override + public String getId() { + return ""; + } + + @Override + public String getPathPrefix() { + if (pathPrefix == null) { + setPathPrefix(DEFAULT_PATH_PREFIX); + } + return pathPrefix; + } + + public void setPathPrefix(String pathPrefix) { + this.pathPrefix = pathPrefix; + } + + @Override + public String getPathSeparator() { + if (pathSeparator == null) { + setPathSeparator(DEFAULT_PATH_SEPARATOR); + } + return pathSeparator; + } + + public void setPathSeparator(String pathSeparator) { + this.pathSeparator = pathSeparator; + } + + @Override + public String getIconPrefix() { + if (iconPrefix == null) { + setIconPrefix(DEFAULT_ICON_PREFIX); + } + return iconPrefix; + } + + public void setIconPrefix(String iconPrefix) { + this.iconPrefix = iconPrefix; + } + + @Override + public String getIconSeparator() { + if (iconSeparator == null) { + setIconSeparator(DEFAULT_ICON_SEPARATOR); + } + return iconSeparator; + } + + public void setIconSeparator(String iconSeparator) { + this.iconSeparator = iconSeparator; + } + + @Override + public String getIconSuffix() { + if (iconSuffix == null) { + setIconSuffix(DEFAULT_ICON_SUFFIX); + } + return iconSuffix; + } + + public void setIconSuffix(String iconSuffix) { + this.iconSuffix = iconSuffix; + } + + @Override + public String getI18nPrefix() { + if (i18nPrefix == null) { + setI18nPrefix(DEFAULT_I18N_PREFIX); + } + return i18nPrefix; + } + + public void setI18nPrefix(String i18nPrefix) { + this.i18nPrefix = i18nPrefix; + } + + @Override + public String getI18nSeparator() { + if (i18nSeparator == null) { + setI18nSeparator(DEFAULT_I18N_SEPARATOR); + } + return i18nSeparator; + } + + public void setI18nSeparator(String i18nSeparator) { + this.i18nSeparator = i18nSeparator; + } + + @Override + public String getI18nSuffix() { + if (i18nSuffix == null) { + setI18nSuffix(DEFAULT_I18N_SUFFIX); + } + return i18nSuffix; + } + + public void setI18nSuffix(String i18nSuffix) { + this.i18nSuffix = i18nSuffix; + } + + @Override + public String getViewSuffix() { + if (viewSuffix == null) { + setViewSuffix(DEFAULT_VIEW_SUFFIX); + } + return viewSuffix; + } + + /** + * Find the view with the specified name in the NavigationModel. 
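+ * <p>
+ * For example, with the default path prefix "/", path separator "/" and view suffix ".view", calling
+ * findView("/system/overview.view") returns the view with ID "overview" in the group with ID "system"
+ * (the "system" and "overview" IDs here are purely illustrative).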
+ * + * @param viewName the name of the view, specified as a /-separated hierarchy of groups, where the part after the + * last / is interpreted as the name of the view itself. + * @return the view, or null if no view matching the specified name could be found + */ + public View findView(String viewName) { + View result; + + int prefixLength = getPathPrefix().length(); + viewName = viewName.substring(prefixLength); + result = findViewInternal(viewName); + + return result; + } + + /** + * Add another NavigationModel to this one. This is done by adding all groups and view from the other model to this + * one. + * + * @param other the model to add to this one. + */ + public void addModel(NavigationModel other) { + for (Group group : other.getGroups()) { + addGroup(group); + } + for (View view : other.getViews()) { + addView(view); + } + } + + /** + * Set the locations of the navigation model resources to be used in the construction of this model. Calling this + * method will cause this NavigationModel to be initialized. + * + * @param navigationModelLocations a list of resource names + */ + public void setNavigationModels(List navigationModelLocations) { + this.navigationModelLocations = navigationModelLocations; + createNavigationModel(); + } + + private void createNavigationModel() { + boolean first = true; + for (String navigationModelLocation : navigationModelLocations) { + NavigationXmlParser parser = new NavigationXmlParser(); + if (first) { + parser.parseInto(this, ResourceUtil.getURL(navigationModelLocation)); + first = false; + } else { + addModel(parser.parse(ResourceUtil.getURL(navigationModelLocation))); + } + } + } + + @Override + public Object clone() { + NavigationModel result = new NavigationModel(getId()); + copyCommonAttributes(result); + copyGroupsAndViews(result); + return result; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationNode.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationNode.java new file mode 100644 index 00000000000..7b4c4b0bb0d --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationNode.java @@ -0,0 +1,116 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +/** + * NavigationNode represents a node in a NavigationModel. + * + * @author Herko ter Horst + */ +public interface NavigationNode extends Cloneable { + + /** + * Get the ID of the node. + * + * @return the ID of the node + */ + String getId(); + + /** + * Is the node hidden? + * + * @return true if the node is hidden, false otherwise + */ + boolean isHidden(); + + /** + * Set the hidden status of the node. + * + * @param hidden the new hidden status of the node + */ + void setHidden(boolean hidden); + + /** + * Is the node enabled/active? + * + * @return true if the node is enabled, false otherwise + */ + boolean isEnabled(); + + /** + * Set the enabled status of the node. 
+ * + * @param enabled the new enabled status of the node + */ + void setEnabled(boolean enabled); + + /** + * Get the parent node of this node. + * + * @return the parent node of this node, or null if this node is the root NavigationModel + */ + NavigationNode getParent(); + + /** + * Set the parent of this node. + * + * @param parent the new parent of this node + */ + void setParent(NavigationNode parent); + + /** + * Is this node a parent of the specified node? + * + * @param node the node to check + * @return true if this node is a direct or indirect parent of the specified node, false otherwise + */ + boolean isParent(NavigationNode node); + + /** + * Get the depth of this node in the hierarchy. The root NavigationModel has depth 0, all other nodes have a depth + * equal to the depth of their parent + 1. + * + * @return the depth of the node in the hierarhcy + */ + int getDepth(); + + String getPathPrefix(); + + String getPathSeparator(); + + String getPath(); + + void setPath(String path); + + String getIconPrefix(); + + String getIconSeparator(); + + String getIconSuffix(); + + String getIcon(); + + void setIcon(String icon); + + String getI18nPrefix(); + + String getI18nSeparator(); + + String getI18nSuffix(); + + String getI18n(); + + void setI18n(String i18n); + + String getViewSuffix(); + + void setViewSuffix(String suffix); +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationXmlParser.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationXmlParser.java new file mode 100644 index 00000000000..387e3609abd --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationXmlParser.java @@ -0,0 +1,154 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +import java.io.IOException; +import java.net.URL; + +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; + +import org.eclipse.rdf4j.common.xml.DocumentUtil; +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; + +/** + * XPath-based parser for NavigationModel configuration files. 
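+ * <p>
+ * A minimal illustrative configuration, using only elements this parser understands (the IDs and the
+ * ".view" suffix are hypothetical):
+ *
+ * <pre>
+ * &lt;navigation id="demo"&gt;
+ *   &lt;view-suffix&gt;.view&lt;/view-suffix&gt;
+ *   &lt;group id="system"&gt;
+ *     &lt;view id="overview" /&gt;
+ *   &lt;/group&gt;
+ * &lt;/navigation&gt;
+ * </pre>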
+ * + * @author Herko ter Horst + */ +public class NavigationXmlParser { + + private final XPath xpath = XPathFactory.newInstance().newXPath(); + + public NavigationModel parse(URL navigationXml) { + NavigationModel result = new NavigationModel(); + parseInto(result, navigationXml); + return result; + } + + public void parseInto(NavigationModel result, URL navigationXml) { + try { + Document document = DocumentUtil.getDocument(navigationXml); + Node rootNode = (Node) xpath.evaluate("/navigation", document, XPathConstants.NODE); + fillModel(result, rootNode); + } catch (IOException | XPathExpressionException e) { + e.printStackTrace(); + } + } + + private void fillModel(NavigationModel result, Node modelNode) throws XPathExpressionException { + String id = xpath.evaluate("@id", modelNode); + result.setId(id); + + String pathPrefix = xpath.evaluate("path-prefix", modelNode); + if (!"".equals(pathPrefix)) { + result.setPathPrefix(pathPrefix); + } + String pathSeparator = xpath.evaluate("path-separator", modelNode); + if (!"".equals(pathSeparator)) { + result.setPathSeparator(pathSeparator); + } + + String iconPrefix = xpath.evaluate("icon-prefix", modelNode); + if (!"".equals(iconPrefix)) { + result.setIconPrefix(iconPrefix); + } + String iconSeparator = xpath.evaluate("icon-separator", modelNode); + if (!"".equals(iconSeparator)) { + result.setIconSeparator(iconSeparator); + } + String iconSuffix = xpath.evaluate("icon-suffix", modelNode); + if (!"".equals(iconSuffix)) { + result.setIconSuffix(iconSuffix); + } + + String i18nPrefix = xpath.evaluate("i18n-prefix", modelNode); + if (!"".equals(i18nPrefix)) { + result.setI18nPrefix(i18nPrefix); + } + String i18nSeparator = xpath.evaluate("i18n-separator", modelNode); + if (!"".equals(i18nSeparator)) { + result.setI18nSeparator(i18nSeparator); + } + String i18nSuffix = xpath.evaluate("i18n-suffix", modelNode); + if (!"".equals(i18nSuffix)) { + result.setI18nSuffix(i18nSuffix); + } + + setAttributes(result, modelNode); + + setGroupsAndViews(result, modelNode); + } + + private void setAttributes(NavigationNode navNode, Node xmlNode) throws XPathExpressionException { + boolean hidden = getBooleanAttribute(xpath.evaluate("@hidden", xmlNode), false); + navNode.setHidden(hidden); + + boolean enabled = getBooleanAttribute(xpath.evaluate("@enabled", xmlNode), true); + navNode.setEnabled(enabled); + + String path = xpath.evaluate("path", xmlNode); + if (!"".equals(path)) { + navNode.setPath(path); + } + + String icon = xpath.evaluate("icon", xmlNode); + if (!"".equals(icon)) { + navNode.setIcon(icon); + } + + String i18n = xpath.evaluate("i18n", xmlNode); + if (!"".equals(i18n)) { + navNode.setI18n(i18n); + } + + String viewSuffix = xpath.evaluate("view-suffix", xmlNode); + if (!"".equals(viewSuffix)) { + navNode.setViewSuffix(viewSuffix); + } + } + + private void setGroupsAndViews(Group parent, Node xmlNode) throws XPathExpressionException { + NodeList groupList = (NodeList) xpath.evaluate("group", xmlNode, XPathConstants.NODESET); + int groupCount = groupList.getLength(); + for (int groupIndex = 0; groupIndex < groupCount; groupIndex++) { + Node groupNode = groupList.item(groupIndex); + + Group group = new Group(xpath.evaluate("@id", groupNode)); + parent.addGroup(group); + setAttributes(group, groupNode); + setGroupsAndViews(group, groupNode); + } + + NodeList viewList = (NodeList) xpath.evaluate("view", xmlNode, XPathConstants.NODESET); + int viewCount = viewList.getLength(); + for (int viewIndex = 0; viewIndex < viewCount; viewIndex++) { + Node 
viewNode = viewList.item(viewIndex); + + View view = new View(xpath.evaluate("@id", viewNode)); + parent.addView(view); + setAttributes(view, viewNode); + } + } + + private boolean getBooleanAttribute(String attrValue, boolean defaultValue) { + boolean result = defaultValue; + if (attrValue != null && !attrValue.trim().isEmpty()) { + result = attrValue.equalsIgnoreCase("true") || attrValue.equalsIgnoreCase("yes") + || attrValue.equalsIgnoreCase("on"); + } + return result; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/View.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/View.java new file mode 100644 index 00000000000..5fbf3f89dee --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/View.java @@ -0,0 +1,46 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +/** + * View represents a "leaf" in the navigation model, for example a page in a website. + * + * @author Herko ter Horst + */ +public class View extends AbstractNavigationNode { + + /** + * Construct a new view with the specified ID. + * + * @param id the ID of the view + */ + public View(String id) { + super(id); + } + + @Override + public String getPath() { + if (path == null) { + StringBuilder result = new StringBuilder(); + result.append(super.getPath()); + result.append(getViewSuffix()); + setPath(result.toString()); + } + return path; + } + + @Override + public Object clone() { + View result = new View(getId()); + copyCommonAttributes(result); + return result; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/functions/NavigationFunctions.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/functions/NavigationFunctions.java new file mode 100644 index 00000000000..ec8538ff98b --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/navigation/functions/NavigationFunctions.java @@ -0,0 +1,32 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation.functions; + +import org.eclipse.rdf4j.common.webapp.navigation.NavigationNode; + +/** + * JSTL functions for navigation. + * + * @author Herko ter Horst + */ +public class NavigationFunctions { + + /** + * Is the specified potential parent indeed a parent of the specified node. 
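+ * <p>
+ * Intended to be exposed as a JSTL/EL function, e.g. ${nav:isParent(currentGroup, currentView)} in a JSP;
+ * the "nav" prefix and variable names are illustrative and assume a corresponding TLD mapping.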
+ * + * @param potentialParent the potential parent + * @param node the node + * @return true if the potential parent is part of the hierarchical string of parents for the specified node + */ + public static boolean isParent(NavigationNode potentialParent, NavigationNode node) { + return potentialParent.isParent(node); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/SystemInfoController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/SystemInfoController.java new file mode 100644 index 00000000000..6037f0baf05 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/SystemInfoController.java @@ -0,0 +1,162 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.system; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedHashMap; +import java.util.Map; +import java.util.Properties; + +import org.eclipse.rdf4j.common.app.AppConfiguration; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.Controller; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +public class SystemInfoController implements Controller { + + private String view; + + private AppConfiguration config; + + private final ServerInfo server; + + public SystemInfoController() { + server = new ServerInfo(); + } + + public String getView() { + return view; + } + + public void setView(String view) { + this.view = view; + } + + @Override + public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { + ModelAndView result = new ModelAndView(); + result.setViewName(view); + + Map model = new HashMap<>(); + model.put("appConfig", config); + model.put("server", server); + model.put("memory", new MemoryInfo()); + model.put("javaProps", getJavaPropStrings()); + model.put("envVars", getEnvVarStrings()); + result.addAllObjects(model); + + return result; + } + + public AppConfiguration getConfig() { + return config; + } + + public void setConfig(AppConfiguration config) { + this.config = config; + } + + public static class ServerInfo { + + private final String os; + + private final String java; + + private final String user; + + public ServerInfo() { + os = System.getProperty("os.name") + " " + System.getProperty("os.version") + " (" + + System.getProperty("os.arch") + ")"; + java = System.getProperty("java.vendor") + " " + System.getProperty("java.vm.name") + " " + + System.getProperty("java.version"); + user = System.getProperty("user.name"); + } + + public String getOs() { + return os; + } + + public String getJava() { + return java; + } + + public String getUser() { + return user; + } + } + + public static class MemoryInfo { + + private final int maximum; + + private final int used; + + private final float percentageInUse; + + public MemoryInfo() { + Runtime 
runtime = Runtime.getRuntime(); + long usedMemory = runtime.totalMemory() - runtime.freeMemory(); + long maxMemory = runtime.maxMemory(); + + // Memory usage (percentage) + percentageInUse = (float) ((float) usedMemory / (float) maxMemory); + + // Memory usage in MB + used = (int) (usedMemory / 1024 / 1024); + maximum = (int) (maxMemory / 1024 / 1024); + } + + public int getMaximum() { + return maximum; + } + + public int getUsed() { + return used; + } + + public float getPercentageInUse() { + return percentageInUse; + } + } + + private Map getJavaPropStrings() { + Properties sysProps = System.getProperties(); + ArrayList keyList = new ArrayList(sysProps.keySet()); + Collections.sort(keyList); + Map result = new LinkedHashMap(keyList.size()); + Iterator sysPropNames = keyList.iterator(); + while (sysPropNames.hasNext()) { + String name = sysPropNames.next(); + if (!name.startsWith("aduna")) { + result.put(name, sysProps.get(name)); + } + } + return result; + } + + private Map getEnvVarStrings() { + Map envProps = System.getenv(); + ArrayList keyList = new ArrayList(envProps.keySet()); + Collections.sort(keyList); + Map result = new LinkedHashMap(keyList.size()); + Iterator envPropNames = keyList.iterator(); + while (envPropNames.hasNext()) { + String name = envPropNames.next(); + result.put(name, envProps.get(name)); + } + return result; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/SystemOverviewController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/SystemOverviewController.java new file mode 100644 index 00000000000..19900484265 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/SystemOverviewController.java @@ -0,0 +1,127 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.system; + +import java.util.HashMap; +import java.util.Map; + +import org.eclipse.rdf4j.common.app.AppConfiguration; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.Controller; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +public class SystemOverviewController implements Controller { + + private String view; + + private AppConfiguration config; + + private final ServerInfo server; + + public SystemOverviewController() { + server = new ServerInfo(); + } + + public String getView() { + return view; + } + + public void setView(String view) { + this.view = view; + } + + @Override + public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { + ModelAndView result = new ModelAndView(); + result.setViewName(view); + + Map model = new HashMap<>(); + model.put("appConfig", config); + model.put("server", server); + model.put("memory", new MemoryInfo()); + result.addAllObjects(model); + + return result; + } + + public AppConfiguration getConfig() { + return config; + } + + public void setConfig(AppConfiguration config) { + this.config = config; + } + + public static class ServerInfo { + + private final String os; + + private final String java; + + private final String user; + + public ServerInfo() { + os = System.getProperty("os.name") + " " + System.getProperty("os.version") + " (" + + System.getProperty("os.arch") + ")"; + java = System.getProperty("java.vendor") + " " + System.getProperty("java.vm.name") + " " + + System.getProperty("java.version"); + user = System.getProperty("user.name"); + } + + public String getOs() { + return os; + } + + public String getJava() { + return java; + } + + public String getUser() { + return user; + } + } + + public static class MemoryInfo { + + private final int maximum; + + private final int used; + + private final float percentageInUse; + + public MemoryInfo() { + Runtime runtime = Runtime.getRuntime(); + long usedMemory = runtime.totalMemory() - runtime.freeMemory(); + long maxMemory = runtime.maxMemory(); + + // Memory usage (percentage) + percentageInUse = (float) ((float) usedMemory / (float) maxMemory); + + // Memory usage in MB + used = (int) (usedMemory / 1024 / 1024); + maximum = (int) (maxMemory / 1024 / 1024); + } + + public int getMaximum() { + return maximum; + } + + public int getUsed() { + return used; + } + + public float getPercentageInUse() { + return percentageInUse; + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/logging/LoggingOverviewController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/logging/LoggingOverviewController.java new file mode 100644 index 00000000000..405ad5e30d3 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/logging/LoggingOverviewController.java @@ -0,0 +1,209 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.system.logging; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Calendar; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eclipse.rdf4j.common.app.AppConfiguration; +import org.eclipse.rdf4j.common.logging.LogLevel; +import org.eclipse.rdf4j.common.logging.LogReader; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.Controller; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +public class LoggingOverviewController implements Controller { + + private AppConfiguration config; + + String viewName = "system/logging/overview"; + + String appenderName = null; + + String[] loglevels = { "All", LogLevel.ERROR.toString(), LogLevel.WARN.toString(), LogLevel.INFO.toString(), + LogLevel.DEBUG.toString() }; + + @Override + public ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response) throws Exception { + int offset = getOffset(request); + int count = getCount(request); + Map model = new HashMap<>(); + LogReader logReader = getLogReader(offset, count, request); + model.put("logreader", logReader); + model.put("offset", offset); + model.put("count", count); + model.put("countsAvailable", Arrays.asList(50, 100, 200, 500)); + if (logReader.supportsLevelFilter()) { + LogLevel level = logReader.getLevel(); + model.put("level", (level == null) ? "ALL" : level.toString()); + model.put("loglevels", Arrays.asList(this.loglevels)); + } + if (logReader.supportsThreadFilter()) { + String thread = logReader.getThread(); + model.put("thread", (thread == null) ? "ALL" : thread); + List l = new ArrayList<>(); + l.add("All"); + l.addAll(logReader.getThreadNames()); + model.put("threadnames", l); + } + if (logReader.supportsDateRanges()) { + Calendar cal = Calendar.getInstance(); + if (logReader.getStartDate() != null) { + cal.setTime(logReader.getStartDate()); + model.put("startDate", Boolean.TRUE); + } else { + cal.setTime(logReader.getMinDate()); + model.put("startDate", Boolean.FALSE); + } + model.put("s_year", cal.get(Calendar.YEAR)); + model.put("s_month", cal.get(Calendar.MONTH)); + model.put("s_day", cal.get(Calendar.DAY_OF_MONTH)); + model.put("s_hour", cal.get(Calendar.HOUR_OF_DAY)); + model.put("s_min", cal.get(Calendar.MINUTE)); + cal = Calendar.getInstance(); + if (logReader.getEndDate() != null) { + cal.setTime(logReader.getEndDate()); + model.put("endDate", Boolean.TRUE); + } else { + cal.setTime(logReader.getMaxDate()); + model.put("endDate", Boolean.FALSE); + } + model.put("e_year", cal.get(Calendar.YEAR)); + model.put("e_month", cal.get(Calendar.MONTH)); + model.put("e_day", cal.get(Calendar.DAY_OF_MONTH)); + model.put("e_hour", cal.get(Calendar.HOUR_OF_DAY)); + model.put("e_min", cal.get(Calendar.MINUTE)); + } + return new ModelAndView(this.viewName, model); + } + + public LogReader getLogReader(int offset, int count, HttpServletRequest request) { + LogReader logReader = (LogReader) request.getSession() + .getAttribute("logreader" + (appenderName != null ? 
"+" + appenderName : "")); + if (logReader == null) { + if (appenderName == null) { + logReader = config.getLogConfiguration().getDefaultLogReader(); + } else { + logReader = config.getLogConfiguration().getLogReader(appenderName); + } + request.getSession() + .setAttribute("logreader" + (appenderName != null ? "+" + appenderName : ""), logReader); + } + logReader.setOffset(offset); + logReader.setLimit(count); + if (logReader.supportsLevelFilter() && (request.getParameter("level") != null)) { + if (request.getParameter("level").equalsIgnoreCase("ALL")) { + logReader.setLevel(null); + } else { + logReader.setLevel(LogLevel.valueOf(request.getParameter("level"))); + } + } + if (logReader.supportsThreadFilter() && (request.getParameter("thread") != null)) { + if (request.getParameter("thread").equalsIgnoreCase("ALL")) { + logReader.setThread(null); + } else { + logReader.setThread(request.getParameter("thread")); + } + } + if (logReader.supportsDateRanges() && (request.getParameter("filterapplied") != null)) { + if (request.getParameter("applystartdate") != null) { + Calendar cal = Calendar.getInstance(); + cal.set(Integer.parseInt(request.getParameter("s_year")), + Integer.parseInt(request.getParameter("s_month")), + Integer.parseInt(request.getParameter("s_day")), + Integer.parseInt(request.getParameter("s_hour")), + Integer.parseInt(request.getParameter("s_min")), 0); + logReader.setStartDate(cal.getTime()); + } else if (logReader.getStartDate() != null) { + logReader.setStartDate(null); + } + if (request.getParameter("applyenddate") != null) { + Calendar cal = Calendar.getInstance(); + cal.set(Integer.parseInt(request.getParameter("e_year")), + Integer.parseInt(request.getParameter("e_month")), + Integer.parseInt(request.getParameter("e_day")), + Integer.parseInt(request.getParameter("e_hour")), + Integer.parseInt(request.getParameter("e_min")), 59); + logReader.setEndDate(cal.getTime()); + } else if (logReader.getEndDate() != null) { + logReader.setEndDate(null); + } + } + try { + logReader.init(); + } catch (Exception e) { + throw new RuntimeException("Unable to initialize log reader.", e); + } + return logReader; + } + + public AppConfiguration getConfig() { + return config; + } + + public void setConfig(AppConfiguration config) { + this.config = config; + } + + private int getOffset(HttpServletRequest request) { + int result = 0; + + String offsetString = request.getParameter("offset"); + if (offsetString != null && !offsetString.isEmpty()) { + try { + result = Integer.parseInt(offsetString); + } catch (NumberFormatException nfe) { + // ignore, result stays 0 + } + } + + return (result > 0) ? result : 0; + } + + private int getCount(HttpServletRequest request) { + int result = 50; // Default entries count + + String countString = request.getParameter("count"); + if (countString != null && !countString.isEmpty()) { + try { + result = Integer.parseInt(countString); + } catch (NumberFormatException nfe) { + // ignore, result stays 50 + } + } + + return result; + } + + /** + * @return Returns the appenderName. + */ + public String getAppenderName() { + return appenderName; + } + + /** + * @param appenderName The appenderName to set. 
+ */ + public void setAppenderName(String appenderName) { + this.appenderName = appenderName; + } + + public void setViewName(String viewName) { + this.viewName = viewName; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/proxy/ProxySettingsController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/proxy/ProxySettingsController.java new file mode 100644 index 00000000000..a1a8177423e --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/system/proxy/ProxySettingsController.java @@ -0,0 +1,100 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.system.proxy; + +import java.io.IOException; +import java.util.Map; + +import org.eclipse.rdf4j.common.app.net.ProxySettings; +import org.eclipse.rdf4j.common.webapp.util.HttpServerUtil; + +import jakarta.servlet.http.HttpServletResponse; + +/** + * @author Herko ter Horst + */ +public class ProxySettingsController { + + // FIXME: fix this non-implementation + private final ProxySettings PROXY_SETTINGS = null; + + private void setProxies(Map params, HttpServletResponse response) throws IOException { + boolean useProxies = HttpServerUtil.isTrue(HttpServerUtil.getPostDataParameter(params, "connection")); + + if (!useProxies) { + PROXY_SETTINGS.setProxiesEnabled(false); + } else { + String httpProxyHost = HttpServerUtil.getPostDataParameter(params, "httpProxyHost"); + String httpProxyPort = HttpServerUtil.getPostDataParameter(params, "httpProxyPort"); + if (!HttpServerUtil.isEmpty(httpProxyHost)) { + PROXY_SETTINGS.setHttpProxyHost(httpProxyHost); + if (checkPort(httpProxyPort)) { + PROXY_SETTINGS.setHttpProxyPort(httpProxyPort); + } + } + + String httpsProxyHost = HttpServerUtil.getPostDataParameter(params, "httpsProxyHost"); + String httpsProxyPort = HttpServerUtil.getPostDataParameter(params, "httpsProxyPort"); + if (!HttpServerUtil.isEmpty(httpsProxyHost)) { + PROXY_SETTINGS.setHttpsProxyHost(httpsProxyHost); + if (checkPort(httpsProxyPort)) { + PROXY_SETTINGS.setHttpsProxyPort(httpsProxyPort); + } + } + + String ftpProxyHost = HttpServerUtil.getPostDataParameter(params, "ftpProxyHost"); + String ftpProxyPort = HttpServerUtil.getPostDataParameter(params, "ftpProxyPort"); + if (!HttpServerUtil.isEmpty(ftpProxyHost)) { + PROXY_SETTINGS.setFtpProxyHost(ftpProxyHost); + if (checkPort(ftpProxyPort)) { + PROXY_SETTINGS.setFtpProxyPort(ftpProxyPort); + } + } + + String socksProxyHost = HttpServerUtil.getPostDataParameter(params, "socksProxyHost"); + String socksProxyPort = HttpServerUtil.getPostDataParameter(params, "socksProxyPort"); + if (!HttpServerUtil.isEmpty(socksProxyHost)) { + PROXY_SETTINGS.setSocksProxyHost(socksProxyHost); + if (checkPort(socksProxyPort)) { + PROXY_SETTINGS.setHttpProxyPort(socksProxyPort); + } + } + + String proxyExceptions = HttpServerUtil.getPostDataParameter(params, "proxyExceptions"); + if (!HttpServerUtil.isEmpty(proxyExceptions)) { + 
PROXY_SETTINGS.setNonProxyHostsStarting(proxyExceptions); + } + + PROXY_SETTINGS.setProxiesEnabled(true); + } + + PROXY_SETTINGS.save(); + } + + private boolean checkPort(String proxyPort) throws IOException { + boolean result = false; + + int port; + if (!HttpServerUtil.isEmpty(proxyPort)) { + try { + port = Integer.parseInt(proxyPort); + if (port > 0 || port < 65536) { + result = true; + } + } catch (NumberFormatException nfe) { + result = false; + } + } + + return result; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/HeaderElement.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/HeaderElement.java new file mode 100644 index 00000000000..9666e48a504 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/HeaderElement.java @@ -0,0 +1,185 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.util; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import org.eclipse.rdf4j.common.text.StringUtil; + +/** + * An element in an HTTP header value. An HTTP header element has a value and zero or more parameters consisting of a + * key and a value. An example header element is audio/*; q=0.2. 
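+ * <p>
+ * A minimal usage sketch (the header value is illustrative):
+ *
+ * <pre>
+ * HeaderElement elem = HeaderElement.parse("audio/*; q=0.2");
+ * String value = elem.getValue();          // "audio/*"
+ * String q = elem.getParameterValue("q");  // "0.2"
+ * </pre>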
+ */ +public class HeaderElement { + + /*----------------* + * Static methods * + *----------------*/ + + public static HeaderElement parse(String encodedValue) { + HeaderElement result = new HeaderElement(); + + List tokens = HttpServerUtil.splitHeaderString(encodedValue, ';'); + + if (!tokens.isEmpty()) { + // First token is the value of the header element + String token = tokens.get(0); + + // Remove any whitespace and double quotes from the token + token = StringUtil.trimDoubleQuotes(token.trim()); + + result.setValue(token); + + // Add parameters to the header element + for (int i = 1; i < tokens.size(); i++) { + token = (String) tokens.get(i); + + int splitIdx = token.indexOf('='); + + if (splitIdx == -1) { + // No value, only key + token = StringUtil.trimDoubleQuotes(token.trim()); + + // Ignore empty parameters + if (!token.isEmpty()) { + result.addParameter(token); + } + } else { + String key = token.substring(0, splitIdx).trim(); + String value = token.substring(splitIdx + 1).trim(); + value = StringUtil.trimDoubleQuotes(value); + result.addParameter(key, value); + } + } + } + + return result; + } + + /*-----------* + * Variables * + *-----------*/ + + private String value; + + private final List parameters; + + /*--------------* + * Constructors * + *--------------*/ + + public HeaderElement() { + this(""); + } + + public HeaderElement(String value) { + setValue(value); + parameters = new ArrayList<>(); + } + + /*---------* + * Methods * + *---------*/ + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + public int getParameterCount() { + return parameters.size(); + } + + public Parameter getParameter(int i) { + return parameters.get(i); + } + + public Parameter getParameter(String key) { + for (int i = 0; i < parameters.size(); i++) { + Parameter param = parameters.get(i); + if (param.getKey().equals(key)) { + return param; + } + } + + return null; + } + + public String getParameterValue(String key) { + Parameter param = getParameter(key); + + if (param != null) { + return param.getValue(); + } + + return null; + } + + public List getParameters() { + return Collections.unmodifiableList(parameters); + } + + public void addParameter(String key) { + addParameter(key, null); + } + + public void addParameter(String key, String value) { + addParameter(new Parameter(key, value)); + } + + public void addParameter(Parameter param) { + parameters.add(param); + } + + public Parameter removeParameter(int idx) { + return parameters.remove(idx); + } + + public boolean removeParameter(Parameter param) { + return parameters.remove(param); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof HeaderElement) { + HeaderElement other = (HeaderElement) obj; + + return value.equals(other.getValue()) && parameters.equals(other.getParameters()); + } + + return false; + } + + @Override + public int hashCode() { + return value.hashCode(); + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(32); + sb.append(value); + + for (int i = 0; i < parameters.size(); i++) { + Parameter param = parameters.get(i); + + sb.append("; "); + sb.append(param.toString()); + } + + return sb.toString(); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/HttpServerUtil.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/HttpServerUtil.java new file mode 100644 index 00000000000..d34287dcc11 --- /dev/null +++ 
b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/HttpServerUtil.java @@ -0,0 +1,383 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.util; + +import java.util.ArrayList; +import java.util.Enumeration; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +import jakarta.servlet.http.HttpServletRequest; + +public class HttpServerUtil { + + /** + * Extracts the MIME type from the specified content type string. This method parses the content type string and + * returns just the MIME type, ignoring any parameters that are included. + * + * @param contentType A content type string, e.g. application/xml; charset=utf-8 . + * @return The MIME type part of the specified content type string, or null if the specified content type + * string was null. + */ + public static String getMIMEType(String contentType) { + if (contentType == null) { + return null; + } + + return HeaderElement.parse(contentType).getValue(); + } + + /** + * Selects from a set of MIME types, the MIME type that has the highest quality score when matched with the Accept + * headers in the supplied request. + * + * @param mimeTypes The set of available MIME types. + * @param request The request to match the MIME types against. + * @return The MIME type that best matches the types that the client finds acceptable, or null in case no + * acceptable MIME type could be found. + */ + public static String selectPreferredMIMEType(Iterator mimeTypes, HttpServletRequest request) { + List acceptElements = getHeaderElements(request, "Accept"); + + if (acceptElements.isEmpty()) { + // Client does not specify any requirements, return first MIME type + // from the list + if (mimeTypes.hasNext()) { + return mimeTypes.next(); + } else { + return null; + } + } + + String result = null; + HeaderElement matchingAcceptType = null; + + double highestQuality = 0.0; + + while (mimeTypes.hasNext()) { + String mimeType = mimeTypes.next(); + HeaderElement acceptType = matchAcceptHeader(mimeType, acceptElements); + + if (acceptType != null) { + // quality defaults to 1.0 + double quality = 1.0; + + String qualityStr = acceptType.getParameterValue("q"); + if (qualityStr != null) { + try { + quality = Double.parseDouble(qualityStr); + } catch (NumberFormatException e) { + // Illegal quality value, assume it has a different meaning + // and ignore it + } + } + + if (quality > highestQuality) { + result = mimeType; + matchingAcceptType = acceptType; + highestQuality = quality; + } else if (quality == highestQuality) { + // found a match with equal quality preference. check if the + // accept type is more specific + // than the previous match. + if (isMoreSpecificType(acceptType, matchingAcceptType)) { + result = mimeType; + matchingAcceptType = acceptType; + } + } + } + } + + return result; + } + + /** + * Checks if the first supplied MIME type is more specific than the second supplied MIME type. 
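+ * For example, "text/plain" is considered more specific than "text/*", which is in turn more specific than
+ * "*&slash;*" (the concrete types here are illustrative).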
+ * + * @param leftMimeTypeElem + * @param rightMimeTypeElem + * @return true iff leftMimeTypeElem is a more specific MIME type spec than rightMimeTypeElem, false otherwise. + */ + private static boolean isMoreSpecificType(HeaderElement leftMimeTypeElem, HeaderElement rightMimeTypeElem) { + + String[] leftMimeType = splitMIMEType(leftMimeTypeElem.getValue()); + String[] rightMimeType = splitMIMEType(rightMimeTypeElem.getValue()); + + if (rightMimeType != null) { + if (rightMimeType[1].equals("*")) { + if (!leftMimeType[1].equals("*")) { + return true; + } + } + if (rightMimeType[0].equals("*")) { + if (!leftMimeType[0].equals("*")) { + return true; + } + } + + return false; + } else { + return true; + } + } + + private static String[] splitMIMEType(String mimeTypeString) { + int slashIdx = mimeTypeString.indexOf('/'); + if (slashIdx > 0) { + String type = mimeTypeString.substring(0, slashIdx); + String subType = mimeTypeString.substring(slashIdx + 1); + return new String[] { type, subType }; + } else { + // invalid mime type + return null; + } + } + + /** + * Gets the elements of the request header with the specified name. + * + * @param request The request to get the header from. + * @param headerName The name of the header to get the elements of. + * @return A List of {@link HeaderElement} objects. + */ + public static List getHeaderElements(HttpServletRequest request, String headerName) { + List elemList = new ArrayList<>(8); + + @SuppressWarnings("unchecked") + Enumeration headerValues = request.getHeaders(headerName); + while (headerValues.hasMoreElements()) { + String value = headerValues.nextElement(); + + List subValues = splitHeaderString(value, ','); + + for (String subValue : subValues) { + // Ignore any empty header elements + subValue = subValue.trim(); + if (!subValue.isEmpty()) { + elemList.add(HeaderElement.parse(subValue)); + } + } + } + + return elemList; + } + + /** + * Splits the supplied string into sub parts using the specified splitChar as a separator, ignoring occurrences of + * this character inside quoted strings. + * + * @param s The header string to split into sub parts. + * @param splitChar The character to use as separator. + * @return A List of Strings. + */ + public static List splitHeaderString(String s, char splitChar) { + List result = new ArrayList<>(8); + + boolean parsingQuotedString = false; + int i, startIdx = 0; + + for (i = 0; i < s.length(); i++) { + char c = s.charAt(i); + + if (c == splitChar && !parsingQuotedString) { + result.add(s.substring(startIdx, i)); + startIdx = i + 1; + } else if (c == '"') { + parsingQuotedString = !parsingQuotedString; + } + } + + if (startIdx < s.length()) { + result.add(s.substring(startIdx)); + } + + return result; + } + + /** + * Tries to match the specified MIME type spec against the list of Accept header elements, returning the applicable + * header element if available. + * + * @param mimeTypeSpec The MIME type to determine the quality for, e.g. "text/plain" or "application/xml; + * charset=utf-8". + * @param acceptElements A List of {@link HeaderElement} objects. + * @return The Accept header element that matches the MIME type spec most closely, or null if no such + * header element could be found. 
+ */ + public static HeaderElement matchAcceptHeader(String mimeTypeSpec, List acceptElements) { + HeaderElement mimeTypeElem = HeaderElement.parse(mimeTypeSpec); + + while (mimeTypeElem != null) { + for (HeaderElement acceptElem : acceptElements) { + if (matchesAcceptHeader(mimeTypeElem, acceptElem)) { + return acceptElem; + } + } + + // No match found, generalize the MIME type spec and try again + mimeTypeElem = generalizeMIMEType(mimeTypeElem); + } + + return null; + } + + private static boolean matchesAcceptHeader(HeaderElement mimeTypeElem, HeaderElement acceptElem) { + if (!mimeTypeElem.getValue().equals(acceptElem.getValue())) { + return false; + } + + // Values match, check parameters + if (mimeTypeElem.getParameterCount() > acceptElem.getParameterCount()) { + return false; + } + + for (int i = 0; i < mimeTypeElem.getParameterCount(); i++) { + if (!mimeTypeElem.getParameter(i).equals(acceptElem.getParameter(i))) { + return false; + } + } + + return true; + } + + /** + * Generalizes a MIME type element. The following steps are taken for generalization: + *

+ * <ul>
+ * <li>If the MIME type element has one or more parameters, the last parameter is removed.
+ * <li>Otherwise, if the MIME type element's subtype is not equal to '*' then it is set to this value.
+ * <li>Otherwise, if the MIME type element's type is not equal to '*' then it is set to this value.
+ * <li>Otherwise, the MIME type is equal to "*&slash;*" and cannot be generalized any further; null is
+ * returned.
+ * </ul>
+ * <p>
+ * Example generalizations:
+ * <table>
+ * <tr><th>input</th><th>result</th></tr>
+ * <tr><td>application/xml; charset=utf-8</td><td>application/xml</td></tr>
+ * <tr><td>application/xml</td><td>application/*</td></tr>
+ * <tr><td>application/*</td><td>*&slash;*</td></tr>
+ * <tr><td>*&slash;*</td><td>null</td></tr>
+ * </table>
+ * + * @param mimeTypeElem The MIME type element that should be generalized. + * @return The generalized MIME type element, or null if it could not be generalized any further. + */ + private static HeaderElement generalizeMIMEType(HeaderElement mimeTypeElem) { + int parameterCount = mimeTypeElem.getParameterCount(); + if (parameterCount > 0) { + // remove last parameter + mimeTypeElem.removeParameter(parameterCount - 1); + } else { + String mimeType = mimeTypeElem.getValue(); + + int slashIdx = mimeType.indexOf('/'); + if (slashIdx > 0) { + String type = mimeType.substring(0, slashIdx); + String subType = mimeType.substring(slashIdx + 1); + + if (!subType.equals("*")) { + // generalize subtype + mimeTypeElem.setValue(type + "/*"); + } else if (!type.equals("*")) { + // generalize type + mimeTypeElem.setValue("*/*"); + } else { + // Cannot generalize any further + mimeTypeElem = null; + } + } else { + // invalid MIME type + mimeTypeElem = null; + } + } + + return mimeTypeElem; + } + + /** + * Gets the trimmed value of a request parameter as a String. + * + * @return The trimmed value, or null if the parameter does not exist. + */ + public static String getPostDataParameter(Map formData, String name) { + String result = null; + + try { + Object param = formData.get(name); + if (param instanceof String[]) { + String[] paramArray = (String[]) param; + if (paramArray.length > 0) { + result = paramArray[0]; + } + } else if (param instanceof String) { + result = (String) param; + } + + if (result != null) { + result = result.trim(); + } + } catch (ClassCastException cce) { + // ignore, return null + } + + return result; + } + + /** + * @return true if the string is either null or equal to "" + */ + public static boolean isEmpty(String string) { + boolean result = false; + if (string == null || string.trim().isEmpty()) { + result = true; + } + return result; + } + + /** + * @return true if the string is !isEmpty and equal to "true" + */ + public static boolean isTrue(String string) { + boolean result = false; + if (!isEmpty(string) && (string.equalsIgnoreCase("true") || string.equalsIgnoreCase("on"))) { + result = true; + } + return result; + } + + /** + * @return true if the string is !isEmpty and equal to "false" + */ + public static boolean isFalse(String string) { + boolean result = false; + if (!isEmpty(string) && (string.equalsIgnoreCase("false") || string.equalsIgnoreCase("off"))) { + result = true; + } + return result; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/Parameter.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/Parameter.java new file mode 100644 index 00000000000..2739a44d845 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/util/Parameter.java @@ -0,0 +1,58 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.util; + +/** + * A parameter consisting of a key and a value, which are both strings. 
+ */ +public class Parameter { + + private final String key; + + private final String value; + + public Parameter(String key, String value) { + this.key = key; + this.value = value; + } + + public String getKey() { + return key; + } + + public String getValue() { + return value; + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof Parameter) { + Parameter other = (Parameter) obj; + return key.equals(other.getKey()) && value.equals(other.getValue()); + } + + return false; + } + + @Override + public int hashCode() { + return key.hashCode(); + } + + @Override + public String toString() { + if (value == null) { + return key; + } else { + return key + "=" + value; + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/EmptySuccessView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/EmptySuccessView.java new file mode 100644 index 00000000000..6aed7fc17fc --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/EmptySuccessView.java @@ -0,0 +1,52 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.views; + +import static jakarta.servlet.http.HttpServletResponse.SC_NO_CONTENT; + +import java.util.Map; + +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * @author Herko ter Horst + */ +public class EmptySuccessView implements View { + + private static final EmptySuccessView INSTANCE = new EmptySuccessView(); + + public static EmptySuccessView getInstance() { + return INSTANCE; + } + + private EmptySuccessView() { + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws Exception { + try { + // Indicate success with a 204 NO CONTENT response + response.setStatus(SC_NO_CONTENT); + } finally { + response.getOutputStream().close(); + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/SimpleCustomResponseView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/SimpleCustomResponseView.java new file mode 100644 index 00000000000..135c02f33a5 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/SimpleCustomResponseView.java @@ -0,0 +1,77 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.views; + +import java.io.InputStream; +import java.util.Map; + +import org.eclipse.rdf4j.common.io.IOUtil; +import org.springframework.web.servlet.View; + +import jakarta.servlet.ServletOutputStream; +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * @author Herko ter Horst + */ +public class SimpleCustomResponseView implements View { + + public static final String SC_KEY = "sc"; + + public static final String CONTENT_KEY = "content"; + + public static final String CONTENT_LENGTH_KEY = "contentLength"; + + public static final String CONTENT_TYPE_KEY = "contentType"; + + private static final int DEFAULT_SC = HttpServletResponse.SC_OK; + + private static final SimpleCustomResponseView INSTANCE = new SimpleCustomResponseView(); + + public static SimpleCustomResponseView getInstance() { + return INSTANCE; + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws Exception { + int sc = DEFAULT_SC; + if (model.containsKey(SC_KEY)) { + sc = (Integer) model.get(SC_KEY); + } + String contentType = (String) model.get(CONTENT_TYPE_KEY); + Integer contentLength = (Integer) model.get(CONTENT_LENGTH_KEY); + + try (InputStream content = (InputStream) model.get(CONTENT_KEY)) { + response.setStatus(sc); + + try (ServletOutputStream out = response.getOutputStream()) { + if (content != null) { + if (contentType != null) { + response.setContentType(contentType); + } + if (contentLength != null) { + response.setContentLength(contentLength); + } + IOUtil.transfer(content, out); + } else { + response.setContentLength(0); + } + } + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/SimpleResponseView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/SimpleResponseView.java new file mode 100644 index 00000000000..8466a98687d --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/common/webapp/views/SimpleResponseView.java @@ -0,0 +1,82 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
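As a rough sketch of how a controller might hand raw bytes to `SimpleCustomResponseView` above, using its model keys (the handler class and payload are hypothetical, not part of this change set):

```java
import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

import org.eclipse.rdf4j.common.webapp.views.SimpleCustomResponseView;
import org.springframework.web.servlet.ModelAndView;

public class CustomResponseExample {

	// Hypothetical handler method: streams a fixed payload with an explicit content type.
	public ModelAndView handle() {
		byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);

		Map<String, Object> model = new HashMap<>();
		model.put(SimpleCustomResponseView.SC_KEY, 200);
		model.put(SimpleCustomResponseView.CONTENT_TYPE_KEY, "text/plain");
		model.put(SimpleCustomResponseView.CONTENT_LENGTH_KEY, payload.length);
		model.put(SimpleCustomResponseView.CONTENT_KEY, new ByteArrayInputStream(payload));

		return new ModelAndView(SimpleCustomResponseView.getInstance(), model);
	}
}
```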
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.views; + +import java.io.OutputStream; +import java.nio.charset.StandardCharsets; +import java.util.Map; + +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * @author Herko ter Horst + */ +public class SimpleResponseView implements View { + + public static final String SC_KEY = "sc"; + + public static final String CONTENT_KEY = "content"; + + private static final int DEFAULT_SC = HttpServletResponse.SC_OK; + + private static final String CONTENT_TYPE = "text/plain; charset=UTF-8"; + + public static final String CUSTOM_HEADERS_KEY = "headers"; + + private static final SimpleResponseView INSTANCE = new SimpleResponseView(); + + public static SimpleResponseView getInstance() { + return INSTANCE; + } + + private SimpleResponseView() { + } + + @Override + public String getContentType() { + return CONTENT_TYPE; + } + + @SuppressWarnings("rawtypes") + @Override + public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws Exception { + Integer sc = (Integer) model.get(SC_KEY); + if (sc == null) { + sc = DEFAULT_SC; + } + response.setStatus(sc.intValue()); + + response.setContentType(getContentType()); + + if (model.containsKey(CUSTOM_HEADERS_KEY)) { + Map customHeaders = (Map) model.get(CUSTOM_HEADERS_KEY); + if (customHeaders != null) { + for (String headerName : customHeaders.keySet()) { + response.setHeader(headerName, customHeaders.get(headerName)); + } + } + } + + try (OutputStream out = response.getOutputStream()) { + String content = (String) model.get(CONTENT_KEY); + if (content != null) { + byte[] contentBytes = content.getBytes(StandardCharsets.UTF_8); + response.setContentLength(contentBytes.length); + out.write(contentBytes); + } else { + response.setContentLength(0); + } + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ClientHTTPException.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ClientHTTPException.java new file mode 100644 index 00000000000..0b7a22e77f1 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ClientHTTPException.java @@ -0,0 +1,93 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server; + +import java.net.HttpURLConnection; + +/** + * HTTP-related exception indicating that an HTTP client has erred. Status codes for these types of errors are in the + * 4xx range. The default status code for constructors without a statusCode parameter is 400 Bad + * Request. 
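A minimal sketch of the status-code contract described here: the default is 400, any explicit code must be in the 4xx range, and (as the constructor bodies below show) the `(String, Throwable)` constructor forwards only the cause, not the message.

```java
import org.eclipse.rdf4j.http.server.ClientHTTPException;

public class ClientErrorExample {

	public static void main(String[] args) {
		// Defaults to 400 Bad Request
		ClientHTTPException badRequest = new ClientHTTPException("missing 'query' parameter");
		System.out.println(badRequest.getStatusCode()); // 400

		// Any explicit code must be in the 4xx range
		ClientHTTPException notAcceptable = new ClientHTTPException(406, "no acceptable format");
		System.out.println(notAcceptable.getStatusCode()); // 406

		try {
			new ClientHTTPException(500, "not a client error");
		} catch (IllegalArgumentException e) {
			// setStatusCode rejects anything outside 400-499
			System.out.println(e.getMessage());
		}
	}
}
```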
+ * + * @author Arjohn Kampman + */ +public class ClientHTTPException extends HTTPException { + + private static final long serialVersionUID = 7722604284325312749L; + + private static final int DEFAULT_STATUS_CODE = HttpURLConnection.HTTP_BAD_REQUEST; + + /** + * Creates a {@link ClientHTTPException} with status code 400 "Bad Request". + */ + public ClientHTTPException() { + this(DEFAULT_STATUS_CODE); + } + + /** + * Creates a {@link ClientHTTPException} with status code 400 "Bad Request". + */ + public ClientHTTPException(String msg) { + this(DEFAULT_STATUS_CODE, msg); + } + + /** + * Creates a {@link ClientHTTPException} with status code 400 "Bad Request". + */ + public ClientHTTPException(String msg, Throwable t) { + this(DEFAULT_STATUS_CODE, t); + } + + /** + * Creates a {@link ClientHTTPException} with the specified status code. + * + * @throws IllegalArgumentException If statusCode is not in the 4xx range. + */ + public ClientHTTPException(int statusCode) { + super(statusCode); + } + + /** + * Creates a {@link ClientHTTPException} with the specified status code. + * + * @throws IllegalArgumentException If statusCode is not in the 4xx range. + */ + public ClientHTTPException(int statusCode, String message) { + super(statusCode, message); + } + + /** + * Creates a {@link ClientHTTPException} with the specified status code. + * + * @throws IllegalArgumentException If statusCode is not in the 4xx range. + */ + public ClientHTTPException(int statusCode, String message, Throwable t) { + super(statusCode, message, t); + } + + /** + * Creates a {@link ClientHTTPException} with the specified status code. + * + * @throws IllegalArgumentException If statusCode is not in the 4xx range. + */ + public ClientHTTPException(int statusCode, Throwable t) { + super(statusCode, t); + } + + @Override + protected void setStatusCode(int statusCode) { + if (statusCode < 400 || statusCode > 499) { + throw new IllegalArgumentException("Status code must be in the 4xx range, is: " + statusCode); + } + + super.setStatusCode(statusCode); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/HTTPException.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/HTTPException.java new file mode 100644 index 00000000000..73d7a067283 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/HTTPException.java @@ -0,0 +1,51 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server; + +/** + * HTTP-related exception that includes the relevant HTTP status code. 
+ * + * @author Arjohn Kampman + */ +public class HTTPException extends Exception { + + private static final long serialVersionUID = 1356463348553827230L; + + private int statusCode; + + public HTTPException(int statusCode) { + super(); + setStatusCode(statusCode); + } + + public HTTPException(int statusCode, String message) { + super(message); + setStatusCode(statusCode); + } + + public HTTPException(int statusCode, String message, Throwable t) { + super(message, t); + setStatusCode(statusCode); + } + + public HTTPException(int statusCode, Throwable t) { + super(t); + setStatusCode(statusCode); + } + + public final int getStatusCode() { + return statusCode; + } + + protected void setStatusCode(int statusCode) { + this.statusCode = statusCode; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ProtocolExceptionResolver.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ProtocolExceptionResolver.java new file mode 100644 index 00000000000..86624b9d129 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ProtocolExceptionResolver.java @@ -0,0 +1,97 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server; + +import java.io.StringWriter; +import java.util.HashMap; +import java.util.Map; + +import org.eclipse.rdf4j.common.exception.ValidationException; +import org.eclipse.rdf4j.common.webapp.views.SimpleResponseView; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.Rio; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.servlet.HandlerExceptionResolver; +import org.springframework.web.servlet.ModelAndView; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Simple resolver for Exceptions: returns the correct response code and message to the client. 
+ * + * @author Herko ter Horst + */ +public class ProtocolExceptionResolver implements HandlerExceptionResolver { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + @Override + public ModelAndView resolveException(HttpServletRequest request, HttpServletResponse response, Object handler, + Exception exception) { + logger.debug("ProtocolExceptionResolver.resolveException() called"); + + Map model = new HashMap<>(); + + int statusCode = HttpServletResponse.SC_INTERNAL_SERVER_ERROR; + String errMsg = exception.getMessage(); + + if (exception instanceof HTTPException) { + HTTPException httpExc = (HTTPException) exception; + statusCode = httpExc.getStatusCode(); + + if (exception instanceof ClientHTTPException) { + logger.info("Client sent bad request ( " + statusCode + ")", exception); + } else { + logger.error("Error while handling request (" + statusCode + ")", exception); + } + } else { + logger.error("Error while handling request", exception); + } + + int depth = 10; + Throwable temp = exception; + while (!(temp instanceof ValidationException)) { + if (depth-- == 0) { + break; + } + if (temp == null) { + break; + } + temp = temp.getCause(); + } + + if (temp instanceof ValidationException) { + // This is currently just a simple fix that causes the validation report to be printed. + // This should not be the final solution. + Model validationReportModel = ((ValidationException) temp).validationReportAsModel(); + + StringWriter stringWriter = new StringWriter(); + + // We choose RDFJSON because this format doesn't rename blank nodes. + Rio.write(validationReportModel, stringWriter, RDFFormat.RDFJSON); + + statusCode = HttpServletResponse.SC_CONFLICT; + errMsg = stringWriter.toString(); + + Map headers = new HashMap<>(); + headers.put("Content-Type", "application/shacl-validation-report+rdf+json"); + model.put(SimpleResponseView.CUSTOM_HEADERS_KEY, headers); + } + + model.put(SimpleResponseView.SC_KEY, statusCode); + model.put(SimpleResponseView.CONTENT_KEY, errMsg); + + return new ModelAndView(SimpleResponseView.getInstance(), model); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ProtocolUtil.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ProtocolUtil.java new file mode 100644 index 00000000000..96533cdb0ae --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ProtocolUtil.java @@ -0,0 +1,226 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server; + +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_NOT_ACCEPTABLE; + +import java.util.Collection; +import java.util.Enumeration; +import java.util.LinkedHashSet; +import java.util.Optional; + +import org.eclipse.rdf4j.common.lang.FileFormat; +import org.eclipse.rdf4j.common.lang.service.FileFormatServiceRegistry; +import org.eclipse.rdf4j.common.webapp.util.HttpServerUtil; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.protocol.error.ErrorInfo; +import org.eclipse.rdf4j.http.protocol.error.ErrorType; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.ValueFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Utilities to help with the transition between HTTP requests/responses and values expected by the protocol. + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public class ProtocolUtil { + + public static Value parseValueParam(HttpServletRequest request, String paramName, ValueFactory vf) + throws ClientHTTPException { + String paramValue = request.getParameter(paramName); + try { + return Protocol.decodeValue(paramValue, vf); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Invalid value for parameter '" + paramName + "': " + paramValue); + } + } + + public static Resource parseResourceParam(HttpServletRequest request, String paramName, ValueFactory vf) + throws ClientHTTPException { + String paramValue = request.getParameter(paramName); + try { + return Protocol.decodeResource(paramValue, vf); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Invalid value for parameter '" + paramName + "': " + paramValue); + } + } + + public static IRI parseURIParam(HttpServletRequest request, String paramName, ValueFactory vf) + throws ClientHTTPException { + String paramValue = request.getParameter(paramName); + try { + return Protocol.decodeURI(paramValue, vf); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Invalid value for parameter '" + paramName + "': " + paramValue); + } + } + + public static IRI parseGraphParam(HttpServletRequest request, ValueFactory vf) throws ClientHTTPException { + String paramValue = request.getParameter(Protocol.GRAPH_PARAM_NAME); + if (paramValue == null) { + return null; + } + + try { + return Protocol.decodeURI("<" + paramValue + ">", vf); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Invalid value for parameter '" + Protocol.GRAPH_PARAM_NAME + "': " + paramValue); + } + } + + public static Resource[] parseContextParam(HttpServletRequest request, String paramName, ValueFactory vf) + throws ClientHTTPException { + String[] paramValues = request.getParameterValues(paramName); + try { + return Protocol.decodeContexts(paramValues, vf); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Invalid value for parameter '" + paramName + "': " + e.getMessage()); + } + } + + public static boolean parseBooleanParam(HttpServletRequest request, String 
paramName, boolean defaultValue) { + String paramValue = request.getParameter(paramName); + if (paramValue == null) { + return defaultValue; + } else { + return Boolean.parseBoolean(paramValue); + } + } + + public static long parseLongParam(HttpServletRequest request, String paramName, long defaultValue) + throws ClientHTTPException { + String paramValue = request.getParameter(paramName); + if (paramValue == null) { + return defaultValue; + } else { + try { + return Long.parseLong(paramValue); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Invalid value for parameter '" + paramName + "': " + e.getMessage()); + } + } + } + + /** + * Logs all request parameters of the supplied request. + */ + public static void logRequestParameters(HttpServletRequest request) { + Logger logger = LoggerFactory.getLogger(ProtocolUtil.class); + if (logger.isDebugEnabled()) { + @SuppressWarnings("unchecked") + Enumeration paramNames = request.getParameterNames(); + while (paramNames.hasMoreElements()) { + String name = paramNames.nextElement(); + for (String value : request.getParameterValues(name)) { + logger.debug("{}=\"{}\"", name, value); + } + } + } + } + + public static S getAcceptableService(HttpServletRequest request, + HttpServletResponse response, FileFormatServiceRegistry serviceRegistry) throws ClientHTTPException { + // Accept-parameter takes precedence over request headers + String mimeType = request.getParameter(Protocol.ACCEPT_PARAM_NAME); + boolean hasAcceptParam = mimeType != null; + + if (mimeType == null) { + // Find an acceptable MIME type based on the request headers + logAcceptableFormats(request); + + Collection mimeTypes = new LinkedHashSet<>(16); + // Prefer the default mime types, explicitly before non-default + for (FileFormat format : serviceRegistry.getKeys()) { + mimeTypes.add(format.getDefaultMIMEType()); + } + for (FileFormat format : serviceRegistry.getKeys()) { + mimeTypes.addAll(format.getMIMETypes()); + } + + mimeType = HttpServerUtil.selectPreferredMIMEType(mimeTypes.iterator(), request); + + response.setHeader("Vary", "Accept"); + } + + if (mimeType != null) { + Optional format = serviceRegistry.getFileFormatForMIMEType(mimeType); + + if (format.isPresent()) { + return serviceRegistry.get(format.get()).get(); + } + } + + if (hasAcceptParam) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.UNSUPPORTED_FILE_FORMAT, mimeType); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } else { + // No acceptable format was found, send 406 as required by RFC 2616 + throw new ClientHTTPException(SC_NOT_ACCEPTABLE, "No acceptable file format found."); + } + } + + /** + * Reads the {@link Protocol#TIMEOUT_PARAM_NAME} parameter from the request and (if present) parses it into an + * integer value. + * + * @param request the {@link HttpServletRequest} to read the parameter from + * @return the value of the timeout parameter as an integer (representing the timeout time in seconds), or 0 if no + * timeout parameter is specified in the request. + * @throws ClientHTTPException if the value of the timeout parameter is not a valid integer. 
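A sketch of how a handler might use these `ProtocolUtil` helpers to read typed request parameters; invalid input surfaces as a `ClientHTTPException` (HTTP 400). The parameter names used here are hypothetical.

```java
import org.eclipse.rdf4j.http.server.ClientHTTPException;
import org.eclipse.rdf4j.http.server.ProtocolUtil;

import jakarta.servlet.http.HttpServletRequest;

public class RequestParsingExample {

	// Hypothetical helper: pulls a few typed values out of the request, relying on
	// ProtocolUtil to translate bad input into ClientHTTPException (HTTP 400).
	static void readParameters(HttpServletRequest request) throws ClientHTTPException {
		boolean infer = ProtocolUtil.parseBooleanParam(request, "infer", true);
		long limit = ProtocolUtil.parseLongParam(request, "limit", 0L);
		int timeoutSeconds = ProtocolUtil.parseTimeoutParam(request);

		System.out.printf("infer=%s limit=%d timeout=%ds%n", infer, limit, timeoutSeconds);
	}
}
```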
+ */ + public static int parseTimeoutParam(HttpServletRequest request) throws ClientHTTPException { + final String timeoutParam = request.getParameter(Protocol.TIMEOUT_PARAM_NAME); + int maxExecutionTime = 0; + if (timeoutParam != null) { + try { + maxExecutionTime = Integer.parseInt(timeoutParam); + } catch (NumberFormatException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Invalid timeout value: " + timeoutParam); + } + } + return maxExecutionTime; + } + + public static void logAcceptableFormats(HttpServletRequest request) { + Logger logger = LoggerFactory.getLogger(ProtocolUtil.class); + if (logger.isDebugEnabled()) { + StringBuilder acceptable = new StringBuilder(64); + + @SuppressWarnings("unchecked") + Enumeration acceptHeaders = request.getHeaders("Accept"); + + while (acceptHeaders.hasMoreElements()) { + acceptable.append(acceptHeaders.nextElement()); + + if (acceptHeaders.hasMoreElements()) { + acceptable.append(','); + } + } + + logger.debug("Acceptable formats: " + acceptable); + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ServerHTTPException.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ServerHTTPException.java new file mode 100644 index 00000000000..e1305b0e57c --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ServerHTTPException.java @@ -0,0 +1,97 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server; + +import java.net.HttpURLConnection; + +/** + * HTTP-related exception indicating that an error occurred in a server. Status codes for these types of errors are in + * the 5xx range. The default status code for constructors without a statusCode parameter is 500 + * Internal Server Error. + * + * @author Arjohn Kampman + */ +public class ServerHTTPException extends HTTPException { + + private static final long serialVersionUID = -3949837199542648966L; + + private static final int DEFAULT_STATUS_CODE = HttpURLConnection.HTTP_INTERNAL_ERROR; + + /** + * Creates a {@link ServerHTTPException} with status code 500 "Internal Server Error". + */ + public ServerHTTPException() { + this(DEFAULT_STATUS_CODE); + } + + /** + * Creates a {@link ServerHTTPException} with status code 500 "Internal Server Error". + */ + public ServerHTTPException(String msg) { + this(DEFAULT_STATUS_CODE, msg); + } + + /** + * Creates a {@link ServerHTTPException} with status code 500 "Internal Server Error". + */ + public ServerHTTPException(String msg, Throwable t) { + this(DEFAULT_STATUS_CODE, t); + } + + /** + * Creates a {@link ServerHTTPException} with the specified status code. The supplied status code must be in the 5xx + * range. + * + * @throws IllegalArgumentException If statusCode is not in the 5xx range. + */ + public ServerHTTPException(int statusCode) { + super(statusCode); + } + + /** + * Creates a {@link ServerHTTPException} with the specified status code. The supplied status code must be in the 5xx + * range. 
+ * + * @throws IllegalArgumentException If statusCode is not in the 5xx range. + */ + public ServerHTTPException(int statusCode, String message) { + super(statusCode, message); + } + + /** + * Creates a {@link ServerHTTPException} with the specified status code. The supplied status code must be in the 5xx + * range. + * + * @throws IllegalArgumentException If statusCode is not in the 5xx range. + */ + public ServerHTTPException(int statusCode, String message, Throwable t) { + super(statusCode, message, t); + } + + /** + * Creates a {@link ServerHTTPException} with the specified status code. The supplied status code must be in the 5xx + * range. + * + * @throws IllegalArgumentException If statusCode is not in the 5xx range. + */ + public ServerHTTPException(int statusCode, Throwable t) { + super(statusCode, t); + } + + @Override + protected void setStatusCode(int statusCode) { + if (statusCode < 500 || statusCode > 599) { + throw new IllegalArgumentException("Status code must be in the 5xx range, is: " + statusCode); + } + + super.setStatusCode(statusCode); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ServerInterceptor.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ServerInterceptor.java new file mode 100644 index 00000000000..bc62e256747 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/ServerInterceptor.java @@ -0,0 +1,93 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server; + +import java.util.UUID; +import java.util.concurrent.atomic.AtomicLong; + +import org.slf4j.MDC; +import org.springframework.web.servlet.HandlerInterceptor; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Base class for single-use request interceptors. This implementation sets the thread name to something sensible at the + * start of the request handling and resets the name at the end. This is useful for logging frameworks that make use of + * thread names, such as Log4J. Should not be a singleton bean! 
Configure as inner bean in openrdf-servlet.xml + * + * @author Herko ter Horst + */ +public abstract class ServerInterceptor implements HandlerInterceptor { + + private static final String REQUEST_ID_KEY = "org.eclipse.rdf4j.requestId"; + private static final String PROCESS_ID = "process:" + UUID.randomUUID(); + + private static final AtomicLong requestNumber = new AtomicLong(0L); + + private volatile String origThreadName; + + private static String createRequestId() { + return PROCESS_ID + ":request:" + requestNumber.getAndIncrement(); + } + + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse response, Object handler) + throws Exception { + origThreadName = Thread.currentThread().getName(); + Thread.currentThread().setName(getThreadName()); + MDC.put(REQUEST_ID_KEY, createRequestId()); + + setRequestAttributes(request); + + return HandlerInterceptor.super.preHandle(request, response, handler); + } + + @Override + public void afterCompletion(HttpServletRequest request, HttpServletResponse response, Object handler, + Exception exception) throws Exception { + try { + cleanUpResources(); + } finally { + MDC.remove(REQUEST_ID_KEY); + Thread.currentThread().setName(origThreadName); + } + } + + /** + * Determine the thread name to use. Called before the request is forwarded to a handler. + * + * @return a name that makes sense based on the request + * @throws ServerHTTPException if it was impossible to determine a name due to an internal error + */ + protected abstract String getThreadName() throws ServerHTTPException; + + /** + * Set attributes for this request. Called before the request is forwarded to a handler. By default, this method + * does nothing. + * + * @param request the request + * @throws ClientHTTPException if it was impossible to set one or more attributes due to a bad request on the part + * of the client + * @throws ServerHTTPException if it was impossible to set one or more attributes due to an internal error + */ + protected void setRequestAttributes(HttpServletRequest request) throws ClientHTTPException, ServerHTTPException { + } + + /** + * Clean up resources used in handling this request. Called after the request is handled and a the view is rendered + * (or an exception has occurred). By default, this method does nothing. + * + * @throws ServerHTTPException if some resources could not be cleaned up because of an internal error + */ + protected void cleanUpResources() throws ServerHTTPException { + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/protocol/ProtocolController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/protocol/ProtocolController.java new file mode 100644 index 00000000000..3c4ab805950 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/protocol/ProtocolController.java @@ -0,0 +1,43 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.protocol; + +import java.util.HashMap; +import java.util.Map; + +import org.eclipse.rdf4j.common.webapp.views.SimpleResponseView; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for protocol information. Currently returns the protocol version as plain text. + * + * @author Herko ter Horst + */ +public class ProtocolController extends AbstractController { + + public ProtocolController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_HEAD); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + Map model = new HashMap<>(); + model.put(SimpleResponseView.CONTENT_KEY, Protocol.VERSION); + return new ModelAndView(SimpleResponseView.getInstance(), model); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/protocol/ProtocolInterceptor.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/protocol/ProtocolInterceptor.java new file mode 100644 index 00000000000..44bc2e5355d --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/protocol/ProtocolInterceptor.java @@ -0,0 +1,27 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.protocol; + +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ServerInterceptor; + +/** + * Interceptor for protocol requests. Should not be a singleton bean! Configure as inner bean in openrdf-servlet.xml + * + * @author Herko ter Horst + */ +public class ProtocolInterceptor extends ServerInterceptor { + + @Override + protected String getThreadName() { + return Protocol.PROTOCOL; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/AbstractRepositoryController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/AbstractRepositoryController.java new file mode 100644 index 00000000000..974275f0c0d --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/AbstractRepositoryController.java @@ -0,0 +1,60 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
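For context, the `ProtocolController` above is what answers the server's protocol-version request with a plain-text body. A client-side sketch using the JDK HTTP client (the server URL is hypothetical):

```java
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ProtocolVersionClient {

	public static void main(String[] args) throws Exception {
		HttpClient client = HttpClient.newHttpClient();

		// Hypothetical server location; the /protocol endpoint returns the protocol version as plain text.
		HttpRequest request = HttpRequest.newBuilder()
				.uri(URI.create("http://localhost:8080/rdf4j-server/protocol"))
				.GET()
				.build();

		HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
		System.out.println("RDF4J protocol version: " + response.body());
	}
}
```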
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import org.eclipse.rdf4j.http.server.repository.handler.QueryRequestHandler; +import org.eclipse.rdf4j.http.server.repository.handler.RepositoryRequestHandler; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +public abstract class AbstractRepositoryController extends AbstractController { + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + public AbstractRepositoryController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_POST, "PUT", "DELETE", METHOD_HEAD); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + + logger.debug("Request method: {}", request.getMethod()); + + RequestMethod requestMethod = RequestMethod.valueOf(request.getMethod()); + + switch (requestMethod) { + case DELETE: { + logger.debug("handleDeleteRepositoryRequest"); + return getRepositoryRequestHandler().handleDeleteRepositoryRequest(request); + } + case PUT: { + logger.debug("handleCreateOrUpdateRepositoryRequest"); + return getRepositoryRequestHandler().handleCreateOrUpdateRepositoryRequest(request); + } + } + + logger.debug("handleQueryRequest"); + + return getQueryRequestHandler().handleQueryRequest(request, requestMethod, response); + } + + protected abstract QueryRequestHandler getQueryRequestHandler(); + + protected abstract RepositoryRequestHandler getRepositoryRequestHandler(); + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/BooleanQueryResultView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/BooleanQueryResultView.java new file mode 100644 index 00000000000..5f3c3810545 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/BooleanQueryResultView.java @@ -0,0 +1,76 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
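`AbstractRepositoryController` above routes PUT and DELETE to the repository handler and all other supported methods to the query handler, so a concrete controller only needs to supply those two collaborators. A hypothetical subclass sketch (constructor injection is an assumption, not the project's actual wiring):

```java
import org.eclipse.rdf4j.http.server.repository.AbstractRepositoryController;
import org.eclipse.rdf4j.http.server.repository.handler.QueryRequestHandler;
import org.eclipse.rdf4j.http.server.repository.handler.RepositoryRequestHandler;

public class MyRepositoryController extends AbstractRepositoryController {

	private final QueryRequestHandler queryRequestHandler;
	private final RepositoryRequestHandler repositoryRequestHandler;

	public MyRepositoryController(QueryRequestHandler queryRequestHandler,
			RepositoryRequestHandler repositoryRequestHandler) {
		this.queryRequestHandler = queryRequestHandler;
		this.repositoryRequestHandler = repositoryRequestHandler;
	}

	@Override
	protected QueryRequestHandler getQueryRequestHandler() {
		return queryRequestHandler;
	}

	@Override
	protected RepositoryRequestHandler getRepositoryRequestHandler() {
		return repositoryRequestHandler;
	}
}
```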
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static jakarta.servlet.http.HttpServletResponse.SC_OK; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Map; + +import org.eclipse.rdf4j.query.QueryResultHandlerException; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultFormat; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriter; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriterFactory; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * View used to render boolean query results. Renders results in a format specified using a parameter or Accept header. + * + * @author Arjohn Kampman + */ +public class BooleanQueryResultView extends QueryResultView { + + private static final BooleanQueryResultView INSTANCE = new BooleanQueryResultView(); + + public static BooleanQueryResultView getInstance() { + return INSTANCE; + } + + private BooleanQueryResultView() { + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + protected void renderInternal(Map model, HttpServletRequest request, HttpServletResponse response) + throws IOException { + BooleanQueryResultWriterFactory brWriterFactory = (BooleanQueryResultWriterFactory) model.get(FACTORY_KEY); + BooleanQueryResultFormat brFormat = brWriterFactory.getBooleanQueryResultFormat(); + + response.setStatus(SC_OK); + setContentType(response, brFormat); + setContentDisposition(model, response, brFormat); + + boolean headersOnly = (Boolean) model.get(HEADERS_ONLY); + + if (!headersOnly) { + try (OutputStream out = response.getOutputStream()) { + BooleanQueryResultWriter qrWriter = brWriterFactory.getWriter(out); + boolean value = (Boolean) model.get(QUERY_RESULT_KEY); + qrWriter.handleBoolean(value); + } catch (QueryResultHandlerException e) { + if (e.getCause() != null && e.getCause() instanceof IOException) { + throw (IOException) e.getCause(); + } else { + throw new IOException(e); + } + } + } + logEndOfRequest(request); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/ExplainQueryResultView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/ExplainQueryResultView.java new file mode 100644 index 00000000000..5a3fa458244 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/ExplainQueryResultView.java @@ -0,0 +1,64 @@ +/******************************************************************************* + * Copyright (c) 2025 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
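A sketch of how a handler might populate the model consumed by `BooleanQueryResultView` above, using the keys defined on `QueryResultView`. The handler class is hypothetical, and it assumes the SPARQL/JSON boolean writer factory from the rdf4j-queryresultio-sparqljson module is on the classpath.

```java
import java.util.HashMap;
import java.util.Map;

import org.eclipse.rdf4j.http.server.repository.BooleanQueryResultView;
import org.eclipse.rdf4j.http.server.repository.QueryResultView;
import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriterFactory;
import org.eclipse.rdf4j.query.resultio.sparqljson.SPARQLBooleanJSONWriterFactory;
import org.springframework.web.servlet.ModelAndView;

public class BooleanResultExample {

	// Hypothetical handler: renders the boolean result of an ASK query as SPARQL/JSON.
	public ModelAndView renderAskResult(boolean result, boolean headRequest) {
		BooleanQueryResultWriterFactory writerFactory = new SPARQLBooleanJSONWriterFactory();

		Map<String, Object> model = new HashMap<>();
		model.put(QueryResultView.FACTORY_KEY, writerFactory);
		model.put(QueryResultView.QUERY_RESULT_KEY, result);
		model.put(QueryResultView.HEADERS_ONLY, headRequest);
		model.put(QueryResultView.FILENAME_HINT_KEY, "ask-result");

		return new ModelAndView(BooleanQueryResultView.getInstance(), model);
	}
}
```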
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import java.io.IOException; +import java.io.PrintWriter; +import java.util.Map; + +import org.apache.http.HttpStatus; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.query.explanation.Explanation; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +public class ExplainQueryResultView extends QueryResultView { + + private static final String MIME_PLAIN = "text/plain"; + private static final String MIME_JSON = "application/json"; + + @Override + protected void renderInternal( + final Map model, final HttpServletRequest request, final HttpServletResponse response) throws IOException { + + String mimeType = getRequestedMimeType(request); + Explanation explanation = (Explanation) model.get(QUERY_EXPLAIN_RESULT_KEY); + + if (explanation == null) { + response.sendError(HttpServletResponse.SC_BAD_REQUEST, "No explanation result found."); + return; + } + + response.setCharacterEncoding("UTF-8"); + response.setStatus(HttpStatus.SC_OK); + + try (PrintWriter writer = response.getWriter()) { + if (MIME_JSON.equals(mimeType)) { + response.setContentType(MIME_JSON); + writer.write(explanation.toJson()); + } else if (MIME_PLAIN.equals(mimeType) || mimeType == null || mimeType.isEmpty()) { + response.setContentType(MIME_PLAIN); + writer.write(explanation.toString()); + } else { + response.sendError( + HttpServletResponse.SC_BAD_REQUEST, + "Unsupported MIME type: " + mimeType + ". Must be either text/plain or application/json." + ); + } + } + } + + private String getRequestedMimeType(HttpServletRequest request) { + String mimeType = request.getParameter(Protocol.ACCEPT_PARAM_NAME); + return (mimeType != null) ? mimeType : request.getHeader("Accept"); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/GraphQueryResultView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/GraphQueryResultView.java new file mode 100644 index 00000000000..4c0b322dee9 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/GraphQueryResultView.java @@ -0,0 +1,94 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static jakarta.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR; +import static jakarta.servlet.http.HttpServletResponse.SC_OK; +import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Map; + +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.QueryInterruptedException; +import org.eclipse.rdf4j.query.QueryResults; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.RDFWriter; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * View used to render graph query results. Renders the graph as RDF using a serialization specified using a parameter + * or Accept header. + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public class GraphQueryResultView extends QueryResultView { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + private static final GraphQueryResultView INSTANCE = new GraphQueryResultView(); + + public static GraphQueryResultView getInstance() { + return INSTANCE; + } + + private GraphQueryResultView() { + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + protected void renderInternal(Map model, HttpServletRequest request, HttpServletResponse response) + throws IOException { + RDFWriterFactory rdfWriterFactory = (RDFWriterFactory) model.get(FACTORY_KEY); + RDFFormat rdfFormat = rdfWriterFactory.getRDFFormat(); + + response.setStatus(SC_OK); + setContentType(response, rdfFormat); + setContentDisposition(model, response, rdfFormat); + + boolean headersOnly = (Boolean) model.get(HEADERS_ONLY); + + if (!headersOnly) { + try (OutputStream out = response.getOutputStream()) { + // ensure we handle exceptions _before_ closing the stream + try { + RDFWriter rdfWriter = rdfWriterFactory.getWriter(out); + GraphQueryResult graphQueryResult = (GraphQueryResult) model.get(QUERY_RESULT_KEY); + QueryResults.report(graphQueryResult, rdfWriter); + } catch (QueryInterruptedException e) { + logger.error("Query interrupted", e); + response.sendError(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long"); + } catch (QueryEvaluationException e) { + logger.error("Query evaluation error", e); + response.sendError(SC_INTERNAL_SERVER_ERROR, "Query evaluation error: " + e.getMessage()); + } catch (RDFHandlerException e) { + logger.error("Serialization error", e); + response.sendError(SC_INTERNAL_SERVER_ERROR, "Serialization error: " + e.getMessage()); + } + } + } + logEndOfRequest(request); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/QueryResultView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/QueryResultView.java new file mode 100644 index 00000000000..1b3a6e48d5f --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/QueryResultView.java @@ -0,0 +1,118 @@ +/******************************************************************************* + * Copyright 
(c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_PARAM_NAME; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.util.Map; + +import org.eclipse.rdf4j.common.lang.FileFormat; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Base class for rendering query results. + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public abstract class QueryResultView implements View { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + /** + * Key by which the query result is stored in the model. + */ + public static final String QUERY_RESULT_KEY = "queryResult"; + + /** + * Key by which the query result explanation is stored in the model. + */ + public static final String QUERY_EXPLAIN_RESULT_KEY = "explainResult"; + + /** + * Key by which the query result writer factory is stored in the model. + */ + public static final String FACTORY_KEY = "factory"; + + /** + * Key by which a filename hint is stored in the model. The filename hint may be used to present the client with a + * suggestion for a filename to use for storing the result. + */ + public static final String FILENAME_HINT_KEY = "filenameHint"; + + /** + * Key by which the current {@link RepositoryConnection} is stored in the Model. If this is present, the + * {@link QueryResultView} will take care to close the connection after processing the query result. + */ + public static final String CONNECTION_KEY = "connection"; + + public static final String HEADERS_ONLY = "headersOnly"; + + @SuppressWarnings("rawtypes") + @Override + public final void render(Map model, HttpServletRequest request, HttpServletResponse response) throws IOException { + try { + renderInternal(model, request, response); + } finally { + RepositoryConnection conn = (RepositoryConnection) model.get(CONNECTION_KEY); + if (conn != null) { + conn.close(); + } + } + } + + @SuppressWarnings("rawtypes") + protected abstract void renderInternal(Map model, HttpServletRequest request, HttpServletResponse response) + throws IOException; + + protected void setContentType(HttpServletResponse response, FileFormat fileFormat) throws IOException { + String mimeType = fileFormat.getDefaultMIMEType(); + if (fileFormat.hasCharset()) { + Charset charset = fileFormat.getCharset(); + mimeType += "; charset=" + charset.name(); + } + response.setContentType(mimeType); + } + + @SuppressWarnings("rawtypes") + protected void setContentDisposition(Map model, HttpServletResponse response, FileFormat fileFormat) + throws IOException { + // Report as attachment to make use in browser more convenient + String filename = (String) model.get(FILENAME_HINT_KEY); + + if (filename == null || filename.isEmpty()) { + filename = "result"; + } + + if (fileFormat.getDefaultFileExtension() != null) { + filename += "." 
+ fileFormat.getDefaultFileExtension(); + } + + response.setHeader("Content-Disposition", "attachment; filename=" + filename); + } + + protected void logEndOfRequest(HttpServletRequest request) { + if (logger.isInfoEnabled()) { + String queryStr = request.getParameter(QUERY_PARAM_NAME); + int qryCode = String.valueOf(queryStr).hashCode(); + logger.info("Request for query {} is finished", qryCode); + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryConfigRepository.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryConfigRepository.java new file mode 100644 index 00000000000..87b0bc1e18d --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryConfigRepository.java @@ -0,0 +1,325 @@ +/******************************************************************************* + * Copyright (c) 2017 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import java.io.File; +import java.net.MalformedURLException; +import java.net.URISyntaxException; +import java.util.LinkedHashSet; +import java.util.Optional; +import java.util.Set; + +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.common.iteration.CloseableIteratorIteration; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.Namespace; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.model.impl.TreeModel; +import org.eclipse.rdf4j.query.BooleanQuery; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.RepositoryResult; +import org.eclipse.rdf4j.repository.UnknownTransactionStateException; +import org.eclipse.rdf4j.repository.base.AbstractRepository; +import org.eclipse.rdf4j.repository.base.AbstractRepositoryConnection; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.config.RepositoryConfigUtil; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.eclipse.rdf4j.rio.RDFHandler; +import org.eclipse.rdf4j.rio.RDFHandlerException; + +/** + * {@link Repository} implementation that saves {@link RepositoryConfig} RDF to a {@link RepositoryManager}. 
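A minimal read-only sketch of wrapping a repository manager with `RepositoryConfigRepository` and listing the per-repository configuration contexts it exposes (the data directory is hypothetical and error handling is omitted):

```java
import java.io.File;

import org.eclipse.rdf4j.http.server.repository.RepositoryConfigRepository;
import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.RepositoryResult;
import org.eclipse.rdf4j.repository.manager.LocalRepositoryManager;

public class ConfigRepositoryExample {

	public static void main(String[] args) {
		// Hypothetical data directory for a local repository manager.
		LocalRepositoryManager manager = new LocalRepositoryManager(new File("/tmp/rdf4j-data"));
		manager.init();

		RepositoryConfigRepository configRepository = new RepositoryConfigRepository(manager);
		configRepository.init();

		// Each managed repository's configuration is exposed in its own context.
		try (RepositoryConnection conn = configRepository.getConnection();
				RepositoryResult<Resource> contexts = conn.getContextIDs()) {
			for (Resource context : contexts) {
				System.out.println("configuration context: " + context);
			}
		} finally {
			configRepository.shutDown();
			manager.shutDown();
		}
	}
}
```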
+ * + * @author James Leigh + */ +public class RepositoryConfigRepository extends AbstractRepository { + + /** + * The repository identifier for the system repository that contains the configuration data. + */ + public static final String ID = "SYSTEM"; + + private final RepositoryManager manager; + + public RepositoryConfigRepository(RepositoryManager manager) { + this.manager = manager; + } + + @Override + public void setDataDir(File dataDir) { + // no-op + } + + @Override + public File getDataDir() { + return null; + } + + @Override + public boolean isWritable() throws RepositoryException { + return true; + } + + @Override + public ValueFactory getValueFactory() { + return SimpleValueFactory.getInstance(); + } + + @Override + protected void initializeInternal() throws RepositoryException { + } + + @Override + protected void shutDownInternal() throws RepositoryException { + } + + @Override + public RepositoryConnection getConnection() throws RepositoryException { + return new AbstractRepositoryConnection(this) { + + private boolean active = false; + + private Model committed = loadModel(); + + private final Model added = new TreeModel(); + + private final Model removed = new TreeModel(); + + @Override + public RepositoryResult getContextIDs() throws RepositoryException { + Set contextIDs = new LinkedHashSet<>(); + manager.getRepositoryIDs().forEach(id -> { + contextIDs.add(getContext(id)); + }); + CloseableIteration iter; + iter = new CloseableIteratorIteration<>(contextIDs.iterator()); + return new RepositoryResult<>(iter); + } + + @Override + public RepositoryResult getStatements(Resource subj, IRI pred, Value obj, + boolean includeInferred, Resource... contexts) throws RepositoryException { + CloseableIteration iter = new CloseableIteratorIteration<>( + committed.getStatements(subj, pred, obj, contexts).iterator()); + return new RepositoryResult<>(iter); + } + + @Override + public void exportStatements(Resource subj, IRI pred, Value obj, boolean includeInferred, + RDFHandler handler, Resource... contexts) throws RepositoryException, RDFHandlerException { + Model model = committed.filter(subj, pred, obj, contexts); + handler.startRDF(); + model.getNamespaces().forEach(ns -> { + handler.handleNamespace(ns.getPrefix(), ns.getName()); + }); + model.forEach(st -> { + handler.handleStatement(st); + }); + handler.endRDF(); + } + + @Override + public long size(Resource... 
contexts) throws RepositoryException { + return committed.filter(null, null, null, contexts).size(); + } + + @Override + public boolean isActive() throws UnknownTransactionStateException, RepositoryException { + return active; + } + + @Override + public void begin() throws RepositoryException { + active = true; + } + + @Override + public void prepare() throws RepositoryException { + // no-op + } + + @Override + public void commit() throws RepositoryException { + Set ids = new LinkedHashSet<>(); + ids.addAll(manager.getRepositoryIDs()); + ids.addAll(RepositoryConfigUtil.getRepositoryIDs(added)); + ids.forEach(id -> { + Resource ctx = getContext(id); + Model less = removed.filter(null, null, null, ctx); + Model more = added.filter(null, null, null, ctx); + Model alt = RepositoryConfigUtil.getRepositoryConfigModel(added, id); + if (!less.isEmpty() || !more.isEmpty() || alt != null) { + Model model = new TreeModel(committed.filter(null, null, null, getContext(id))); + model.removeAll(less); + removed.getNamespaces().forEach(ns -> { + model.removeNamespace(ns.getPrefix()); + }); + added.getNamespaces().forEach(ns -> { + model.setNamespace(ns); + }); + model.addAll(more); + if (alt != null) { + model.addAll(alt); + } + if (model.isEmpty()) { + manager.removeRepository(id); + } else { + manager.addRepositoryConfig(RepositoryConfigUtil.getRepositoryConfig(model, id)); + } + } + }); + committed = loadModel(); + rollback(); + } + + @Override + public void rollback() throws RepositoryException { + added.clear(); + added.getNamespaces().clear(); + removed.clear(); + removed.getNamespaces().clear(); + active = false; + } + + @Override + public RepositoryResult getNamespaces() throws RepositoryException { + CloseableIteration iter; + iter = new CloseableIteratorIteration<>(committed.getNamespaces().iterator()); + return new RepositoryResult<>(iter); + } + + @Override + public String getNamespace(String prefix) throws RepositoryException { + Optional ns = committed.getNamespace(prefix); + if (ns.isPresent()) { + return ns.get().getName(); + } else { + return null; + } + } + + @Override + public void setNamespace(String prefix, String name) throws RepositoryException { + removed.removeNamespace(prefix); + added.setNamespace(prefix, name); + } + + @Override + public void removeNamespace(String prefix) throws RepositoryException { + added.removeNamespace(prefix); + Optional ns = committed.getNamespace(prefix); + if (ns.isPresent()) { + removed.setNamespace(ns.get()); + } + } + + @Override + public void clearNamespaces() throws RepositoryException { + added.getNamespaces().clear(); + committed.getNamespaces().forEach(ns -> { + removed.setNamespace(ns); + }); + } + + @Override + public Query prepareQuery(QueryLanguage ql, String query, String baseURI) + throws RepositoryException, MalformedQueryException { + throw unsupported(); + } + + @Override + public TupleQuery prepareTupleQuery(QueryLanguage ql, String query, String baseURI) + throws RepositoryException, MalformedQueryException { + throw unsupported(); + } + + @Override + public GraphQuery prepareGraphQuery(QueryLanguage ql, String query, String baseURI) + throws RepositoryException, MalformedQueryException { + throw unsupported(); + } + + @Override + public BooleanQuery prepareBooleanQuery(QueryLanguage ql, String query, String baseURI) + throws RepositoryException, MalformedQueryException { + throw unsupported(); + } + + @Override + public Update prepareUpdate(QueryLanguage ql, String update, String baseURI) + throws RepositoryException, 
MalformedQueryException { + throw unsupported(); + } + + @Override + protected void addWithoutCommit(Resource subj, IRI pred, Value obj, Resource... contexts) + throws RepositoryException { + added.add(subj, pred, obj, contexts); + } + + @Override + protected void removeWithoutCommit(Resource subj, IRI pred, Value obj, Resource... contexts) + throws RepositoryException { + Model model = committed.filter(subj, pred, obj, contexts); + removed.addAll(model); + } + + private Model loadModel() { + Model model = new TreeModel(); + manager.getRepositoryIDs().forEach(id -> { + Resource ctx = getContext(id); + RepositoryConfig config = manager.getRepositoryConfig(id); + Model cfg = new TreeModel(); + config.export(cfg, ctx); + cfg.getNamespaces().forEach(ns -> { + model.setNamespace(ns); + }); + cfg.forEach(st -> { + model.add(st.getSubject(), st.getPredicate(), st.getObject(), ctx); + }); + }); + return model; + } + + private Resource getContext(String repositoryID) { + String location; + try { + location = manager.getLocation().toURI().toString(); + } catch (MalformedURLException | URISyntaxException e) { + assert false; + location = "urn:" + repositoryID; + } + String url = Protocol.getRepositoryLocation(location, repositoryID); + return getValueFactory().createIRI(url + "#" + repositoryID); + } + + private UnsupportedOperationException unsupported() { + return new UnsupportedOperationException("Query operations are not supported on the SYSTEM repository"); + } + + }; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryController.java new file mode 100644 index 00000000000..369f77e861f --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryController.java @@ -0,0 +1,60 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import org.eclipse.rdf4j.http.server.repository.handler.DefaultQueryRequestHandler; +import org.eclipse.rdf4j.http.server.repository.handler.DefaultRepositoryRequestHandler; +import org.eclipse.rdf4j.http.server.repository.handler.QueryRequestHandler; +import org.eclipse.rdf4j.http.server.repository.handler.RepositoryRequestHandler; +import org.eclipse.rdf4j.http.server.repository.resolver.DefaultRepositoryResolver; +import org.eclipse.rdf4j.http.server.repository.resolver.RepositoryResolver; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; + +/** + * Handles queries and admin (delete) operations on a repository and renders the results in a format suitable to the + * type of operation. 
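+ * <p>
+ * A minimal wiring sketch (illustrative only; in the server webapp the controller is normally declared as a
+ * Spring bean):
+ *
+ * <pre>{@code
+ * RepositoryController controller = new RepositoryController();
+ * controller.setRepositoryManager(repositoryManager);
+ * }</pre>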
+ * + * @author Herko ter Horst + */ +public class RepositoryController extends AbstractRepositoryController { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + private QueryRequestHandler queryRequestHandler; + private RepositoryRequestHandler repositoryRequestHandler; + + public RepositoryController() throws ApplicationContextException { + } + + public void setRepositoryManager(RepositoryManager repMan) { + if (logger.isDebugEnabled()) { + logger.debug("setRepositoryManager {}", repMan); + } + + RepositoryResolver repositoryResolver = new DefaultRepositoryResolver(repMan); + queryRequestHandler = new DefaultQueryRequestHandler(repositoryResolver); + repositoryRequestHandler = new DefaultRepositoryRequestHandler(repositoryResolver); + } + + @Override + protected QueryRequestHandler getQueryRequestHandler() { + return queryRequestHandler; + } + + @Override + protected RepositoryRequestHandler getRepositoryRequestHandler() { + return repositoryRequestHandler; + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryInterceptor.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryInterceptor.java new file mode 100644 index 00000000000..6f19d10831e --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryInterceptor.java @@ -0,0 +1,153 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static jakarta.servlet.http.HttpServletResponse.SC_NOT_FOUND; + +import java.util.Objects; + +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.ServerInterceptor; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.config.RepositoryConfigException; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.eclipse.rdf4j.rio.helpers.BasicParserSettings; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Interceptor for repository requests. Should not be a singleton bean! 
Configure as inner bean in openrdf-servlet.xml + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public class RepositoryInterceptor extends ServerInterceptor { + + /*-----------* + * Constants * + *-----------*/ + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + private static final String REPOSITORY_ID_KEY = "repositoryID"; + + private static final String REPOSITORY_KEY = "repository"; + + /*-----------* + * Variables * + *-----------*/ + + private volatile RepositoryManager repositoryManager; + + private volatile String repositoryID; + + /*---------* + * Methods * + *---------*/ + + public void setRepositoryManager(RepositoryManager repMan) { + repositoryManager = Objects.requireNonNull(repMan, "Repository manager was null"); + } + + @Override + public boolean preHandle(HttpServletRequest request, HttpServletResponse respons, Object handler) throws Exception { + String pathInfoStr = request.getPathInfo(); + logger.debug("path info: {}", pathInfoStr); + + repositoryID = null; + + if (pathInfoStr != null && !pathInfoStr.equals("/")) { + String[] pathInfo = pathInfoStr.substring(1).split("/"); + if (pathInfo.length > 0) { + repositoryID = pathInfo[0]; + logger.debug("repositoryID is '{}'", repositoryID); + } + } + + ProtocolUtil.logRequestParameters(request); + + return super.preHandle(request, respons, handler); + } + + @Override + protected String getThreadName() { + String threadName = Protocol.REPOSITORIES; + + String nextRepositoryID = repositoryID; + if (nextRepositoryID != null) { + threadName += "/" + nextRepositoryID; + } + + return threadName; + } + + @Override + protected void setRequestAttributes(HttpServletRequest request) throws ClientHTTPException, ServerHTTPException { + String nextRepositoryID = repositoryID; + if (RepositoryConfigRepository.ID.equals(nextRepositoryID)) { + request.setAttribute(REPOSITORY_ID_KEY, nextRepositoryID); + request.setAttribute(REPOSITORY_KEY, new RepositoryConfigRepository(repositoryManager)); + } else if (nextRepositoryID != null) { + try { + // For requests to delete a repository, we must not attempt to initialize the repository. Otherwise a + // corrupt/invalid configuration can block deletion. + if ("DELETE".equals(request.getMethod()) && request.getPathInfo().equals("/" + nextRepositoryID)) { + request.setAttribute(REPOSITORY_ID_KEY, nextRepositoryID); + return; + } + + Repository repository = repositoryManager.getRepository(nextRepositoryID); + if (repository == null && !"PUT".equals(request.getMethod())) { + throw new ClientHTTPException(SC_NOT_FOUND, "Unknown repository: " + nextRepositoryID); + } + + request.setAttribute(REPOSITORY_ID_KEY, nextRepositoryID); + request.setAttribute(REPOSITORY_KEY, repository); + } catch (RepositoryConfigException | RepositoryException e) { + throw new ServerHTTPException(e.getMessage(), e); + } + } + } + + public static String getRepositoryID(HttpServletRequest request) { + return (String) request.getAttribute(REPOSITORY_ID_KEY); + } + + public static Repository getRepository(HttpServletRequest request) { + return (Repository) request.getAttribute(REPOSITORY_KEY); + } + + /** + * Obtain a new {@link RepositoryConnection} with suitable parser/writer configuration for handling the incoming + * HTTP request. The caller of this method is responsible for closing the connection. 
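+ * <p>
+ * A minimal usage sketch (illustrative only):
+ *
+ * <pre>{@code
+ * try (RepositoryConnection con = RepositoryInterceptor.getRepositoryConnection(request)) {
+ *     // use the connection to serve the request; try-with-resources takes care of closing it
+ * }
+ * }</pre>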
+ * + * @param request the {@link HttpServletRequest} for which a {@link RepositoryConnection} is to be returned + * @return a configured {@link RepositoryConnection} + */ + public static RepositoryConnection getRepositoryConnection(HttpServletRequest request) throws ClientHTTPException { + Repository repo = getRepository(request); + if (repo == null) { + throw new ClientHTTPException(SC_NOT_FOUND, "Unknown repository: " + getRepositoryID(request)); + } + RepositoryConnection conn = repo.getConnection(); + conn.getParserConfig().addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES); + conn.getParserConfig().addNonFatalError(BasicParserSettings.VERIFY_LANGUAGE_TAGS); + return conn; + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryListController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryListController.java new file mode 100644 index 00000000000..8b4378ef085 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/RepositoryListController.java @@ -0,0 +1,104 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.algebra.evaluation.QueryBindingSet; +import org.eclipse.rdf4j.query.impl.IteratingTupleQueryResult; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterFactory; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterRegistry; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for the list of repositories available on this server. 
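+ * <p>
+ * A GET request is answered with a SPARQL tuple result containing one row per repository, with the bindings
+ * {@code uri}, {@code id}, {@code title}, {@code readable} and {@code writable}; a HEAD request returns only
+ * the response headers.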
+ * + * @author Herko ter Horst + */ +public class RepositoryListController extends AbstractController { + + private RepositoryManager repositoryManager; + + public RepositoryListController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_HEAD); + } + + public void setRepositoryManager(RepositoryManager repMan) { + repositoryManager = repMan; + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + Map model = new HashMap<>(); + + if (METHOD_GET.equals(request.getMethod())) { + ValueFactory vf = SimpleValueFactory.getInstance(); + + try { + List bindingNames = new ArrayList<>(); + List bindingSets = new ArrayList<>(); + + // Determine the repository's URI + StringBuffer requestURL = request.getRequestURL(); + if (requestURL.charAt(requestURL.length() - 1) != '/') { + requestURL.append('/'); + } + String namespace = requestURL.toString(); + + repositoryManager.getAllRepositoryInfos(false).forEach(info -> { + QueryBindingSet bindings = new QueryBindingSet(); + bindings.addBinding("uri", vf.createIRI(namespace, info.getId())); + bindings.addBinding("id", vf.createLiteral(info.getId())); + if (info.getDescription() != null) { + bindings.addBinding("title", vf.createLiteral(info.getDescription())); + } + bindings.addBinding("readable", vf.createLiteral(info.isReadable())); + bindings.addBinding("writable", vf.createLiteral(info.isWritable())); + bindingSets.add(bindings); + }); + + bindingNames.add("uri"); + bindingNames.add("id"); + bindingNames.add("title"); + bindingNames.add("readable"); + bindingNames.add("writable"); + model.put(QueryResultView.QUERY_RESULT_KEY, new IteratingTupleQueryResult(bindingNames, bindingSets)); + } catch (RepositoryException e) { + throw new ServerHTTPException(e.getMessage(), e); + } + } + + TupleQueryResultWriterFactory factory = ProtocolUtil.getAcceptableService(request, response, + TupleQueryResultWriterRegistry.getInstance()); + + model.put(QueryResultView.FILENAME_HINT_KEY, "repositories"); + model.put(QueryResultView.FACTORY_KEY, factory); + model.put(QueryResultView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod())); + + return new ModelAndView(TupleQueryResultView.getInstance(), model); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/TupleQueryResultView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/TupleQueryResultView.java new file mode 100644 index 00000000000..2d3e7e0899d --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/TupleQueryResultView.java @@ -0,0 +1,119 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static jakarta.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR; +import static jakarta.servlet.http.HttpServletResponse.SC_OK; +import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Map; +import java.util.regex.Pattern; + +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.QueryInterruptedException; +import org.eclipse.rdf4j.query.QueryResults; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.TupleQueryResultHandlerException; +import org.eclipse.rdf4j.query.resultio.BasicQueryWriterSettings; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultFormat; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriter; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * View used to render tuple query results. Renders results in a format specified using a parameter or Accept header. + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public class TupleQueryResultView extends QueryResultView { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + protected static final String DEFAULT_JSONP_CALLBACK_PARAMETER = "callback"; + + protected static final Pattern JSONP_VALIDATOR = Pattern.compile("^[A-Za-z]\\w+$"); + + private static final TupleQueryResultView INSTANCE = new TupleQueryResultView(); + + public static TupleQueryResultView getInstance() { + return INSTANCE; + } + + private TupleQueryResultView() { + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + protected void renderInternal(Map model, HttpServletRequest request, HttpServletResponse response) + throws IOException { + TupleQueryResultWriterFactory qrWriterFactory = (TupleQueryResultWriterFactory) model.get(FACTORY_KEY); + TupleQueryResultFormat qrFormat = qrWriterFactory.getTupleQueryResultFormat(); + + response.setStatus(SC_OK); + setContentType(response, qrFormat); + setContentDisposition(model, response, qrFormat); + + final Boolean headersOnly = (Boolean) model.get(HEADERS_ONLY); + if (headersOnly == null || !headersOnly.booleanValue()) { + try (OutputStream out = response.getOutputStream()) { + // ensure we handle exceptions _before_ closing the stream + try { + TupleQueryResultWriter qrWriter = qrWriterFactory.getWriter(out); + TupleQueryResult tupleQueryResult = (TupleQueryResult) model.get(QUERY_RESULT_KEY); + + if (qrWriter.getSupportedSettings().contains(BasicQueryWriterSettings.JSONP_CALLBACK)) { + String parameter = request.getParameter(DEFAULT_JSONP_CALLBACK_PARAMETER); + + if (parameter != null) { + parameter = parameter.trim(); + + if (parameter.isEmpty()) { + parameter = BasicQueryWriterSettings.JSONP_CALLBACK.getDefaultValue(); + } + + // check callback function name is a valid javascript function + // name + if (!JSONP_VALIDATOR.matcher(parameter).matches()) { + throw new IOException("Callback function name was invalid"); + } + + qrWriter.getWriterConfig().set(BasicQueryWriterSettings.JSONP_CALLBACK, parameter); + } + } + + 
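+ // stream the query result through the negotiated writer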
QueryResults.report(tupleQueryResult, qrWriter); + } catch (QueryInterruptedException e) { + logger.error("Query interrupted", e); + response.sendError(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long"); + } catch (QueryEvaluationException e) { + logger.error("Query evaluation error", e); + response.sendError(SC_INTERNAL_SERVER_ERROR, "Query evaluation error: " + e.getMessage()); + } catch (TupleQueryResultHandlerException e) { + logger.error("Serialization error", e); + response.sendError(SC_INTERNAL_SERVER_ERROR, "Serialization error: " + e.getMessage()); + } + } + } + logEndOfRequest(request); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/config/ConfigController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/config/ConfigController.java new file mode 100644 index 00000000000..af7ce12625a --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/config/ConfigController.java @@ -0,0 +1,109 @@ +/******************************************************************************* + * Copyright (c) 2019 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.config; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; + +import org.apache.http.HttpStatus; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.HTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.ModelFactory; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.impl.LinkedHashModelFactory; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.config.RepositoryConfigUtil; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.eclipse.rdf4j.rio.RDFParseException; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.eclipse.rdf4j.rio.RDFWriterRegistry; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.rio.UnsupportedRDFormatException; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests related to repository configuration. 
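+ * <p>
+ * GET and HEAD render the current {@link RepositoryConfig} of the addressed repository as RDF, using a
+ * serialization negotiated via the Accept header; POST parses the request body as RDF and registers the
+ * contained configuration with the {@link RepositoryManager}.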
+ * + * @author Jeen Broekstra + */ +public class ConfigController extends AbstractController { + + private RepositoryManager repositoryManager; + + private final ModelFactory modelFactory = new LinkedHashModelFactory(); + + public ConfigController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_POST, METHOD_HEAD); + } + + public void setRepositoryManager(RepositoryManager repositoryManager) { + this.repositoryManager = repositoryManager; + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + switch (request.getMethod()) { + case METHOD_GET: + case METHOD_HEAD: + return handleQuery(request, response); + case METHOD_POST: + return handleUpdate(request, response); + default: + throw new ClientHTTPException("unrecognized method " + request.getMethod()); + } + } + + private ModelAndView handleQuery(HttpServletRequest request, HttpServletResponse response) + throws ClientHTTPException { + + RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(request, response, + RDFWriterRegistry.getInstance()); + String repId = RepositoryInterceptor.getRepositoryID(request); + RepositoryConfig repositoryConfig = repositoryManager.getRepositoryConfig(repId); + + Model configData = modelFactory.createEmptyModel(); + String baseURI = request.getRequestURL().toString(); + Resource ctx = SimpleValueFactory.getInstance().createIRI(baseURI + "#" + repositoryConfig.getID()); + + repositoryConfig.export(configData, ctx); + Map model = new HashMap<>(); + model.put(ConfigView.FORMAT_KEY, rdfWriterFactory.getRDFFormat()); + model.put(ConfigView.CONFIG_DATA_KEY, configData); + model.put(ConfigView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod())); + return new ModelAndView(ConfigView.getInstance(), model); + } + + private ModelAndView handleUpdate(HttpServletRequest request, HttpServletResponse response) + throws RDFParseException, UnsupportedRDFormatException, IOException, HTTPException { + String repId = RepositoryInterceptor.getRepositoryID(request); + Model model = Rio.parse(request.getInputStream(), "", + Rio.getParserFormatForMIMEType(request.getContentType()) + .orElseThrow(() -> new HTTPException(HttpStatus.SC_BAD_REQUEST, + "unrecognized content type " + request.getContentType()))); + RepositoryConfig config = RepositoryConfigUtil.getRepositoryConfig(model, repId); + repositoryManager.addRepositoryConfig(config); + return new ModelAndView(EmptySuccessView.getInstance()); + + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/config/ConfigView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/config/ConfigView.java new file mode 100644 index 00000000000..7e0df7e384f --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/config/ConfigView.java @@ -0,0 +1,95 @@ +/******************************************************************************* + * Copyright (c) 2019 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.config; + +import static jakarta.servlet.http.HttpServletResponse.SC_OK; + +import java.io.OutputStream; +import java.nio.charset.Charset; +import java.util.Map; + +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.Rio; +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * View used to export a repository config. Renders the statements as RDF using a serialization specified using a + * parameter or Accept header. + * + * @author Jeen Broekstra + */ +public class ConfigView implements View { + + public static final String CONFIG_DATA_KEY = "configData"; + + public static final String FORMAT_KEY = "format"; + + public static final String HEADERS_ONLY = "headersOnly"; + + private static final ConfigView INSTANCE = new ConfigView(); + + public static ConfigView getInstance() { + return INSTANCE; + } + + private ConfigView() { + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws Exception { + boolean headersOnly = (Boolean) model.get(HEADERS_ONLY); + + RDFFormat rdfFormat = (RDFFormat) model.get(FORMAT_KEY); + + try { + try (OutputStream out = response.getOutputStream()) { + + response.setStatus(SC_OK); + + String mimeType = rdfFormat.getDefaultMIMEType(); + if (rdfFormat.hasCharset()) { + Charset charset = rdfFormat.getCharset(); + mimeType += "; charset=" + charset.name(); + } + response.setContentType(mimeType); + + String filename = "config"; + if (rdfFormat.getDefaultFileExtension() != null) { + filename += "." + rdfFormat.getDefaultFileExtension(); + } + response.setHeader("Content-Disposition", "attachment; filename=" + filename); + + if (!headersOnly) { + Model configuration = (Model) model.get(CONFIG_DATA_KEY); + Rio.write(configuration, out, rdfFormat); + } + } + } catch (RDFHandlerException e) { + throw new ServerHTTPException("Serialization error: " + e.getMessage(), e); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/contexts/ContextsController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/contexts/ContextsController.java new file mode 100644 index 00000000000..3c152374418 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/contexts/ContextsController.java @@ -0,0 +1,83 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.contexts; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.QueryResultView; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.http.server.repository.TupleQueryResultView; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.impl.IteratingTupleQueryResult; +import org.eclipse.rdf4j.query.impl.ListBindingSet; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterFactory; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterRegistry; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for the list of contexts in a repository. + * + * @author Herko ter Horst + */ +public class ContextsController extends AbstractController { + + public ContextsController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_HEAD); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + Map model = new HashMap<>(); + TupleQueryResultWriterFactory factory = ProtocolUtil.getAcceptableService(request, response, + TupleQueryResultWriterRegistry.getInstance()); + + if (METHOD_GET.equals(request.getMethod())) { + List columnNames = List.of("contextID"); + List contexts = new ArrayList<>(); + RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request); + try { + try (CloseableIteration contextIter = repositoryCon + .getContextIDs()) { + while (contextIter.hasNext()) { + BindingSet bindingSet = new ListBindingSet(columnNames, contextIter.next()); + contexts.add(bindingSet); + } + } + model.put(QueryResultView.QUERY_RESULT_KEY, new IteratingTupleQueryResult(columnNames, contexts)); + model.put(QueryResultView.FILENAME_HINT_KEY, "contexts"); + model.put(QueryResultView.FACTORY_KEY, factory); + model.put(QueryResultView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod())); + model.put(QueryResultView.CONNECTION_KEY, repositoryCon); + + } catch (RepositoryException e) { + // normally the QueryResultView closes the connection, but not if an exception occurred + repositoryCon.close(); + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + } + return new ModelAndView(TupleQueryResultView.getInstance(), model); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/graph/GraphController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/graph/GraphController.java new file mode 100644 index 00000000000..52ee3edfe01 --- /dev/null +++ 
b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/graph/GraphController.java @@ -0,0 +1,224 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.graph; + +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE; + +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; + +import org.eclipse.rdf4j.common.webapp.util.HttpServerUtil; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.protocol.error.ErrorInfo; +import org.eclipse.rdf4j.http.protocol.error.ErrorType; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.http.server.repository.statements.ExportStatementsView; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFParseException; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.eclipse.rdf4j.rio.RDFWriterRegistry; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.rio.UnsupportedRDFormatException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for manipulating the named graphs in a repository. 
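+ * <p>
+ * GET and HEAD export the statements in the requested graph, POST adds the request body to the graph, PUT
+ * replaces the graph's contents with the request body, and DELETE removes all statements from the graph.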
+ *
+ * @author Jeen Broekstra
+ */
+public class GraphController extends AbstractController {
+
+    private final Logger logger = LoggerFactory.getLogger(this.getClass());
+
+    public GraphController() throws ApplicationContextException {
+        setSupportedMethods(METHOD_GET, METHOD_HEAD, METHOD_POST, "PUT", "DELETE");
+    }
+
+    @Override
+    protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response)
+            throws Exception {
+        ModelAndView result;
+
+        Repository repository = RepositoryInterceptor.getRepository(request);
+
+        String reqMethod = request.getMethod();
+
+        if (METHOD_GET.equals(reqMethod)) {
+            logger.info("GET graph");
+            result = getExportStatementsResult(repository, request, response);
+            logger.info("GET graph request finished.");
+        } else if (METHOD_HEAD.equals(reqMethod)) {
+            logger.info("HEAD graph");
+            result = getExportStatementsResult(repository, request, response);
+            logger.info("HEAD graph request finished.");
+        } else if (METHOD_POST.equals(reqMethod)) {
+            logger.info("POST data to graph");
+            result = getAddDataResult(repository, request, response, false);
+            logger.info("POST data request finished.");
+        } else if ("PUT".equals(reqMethod)) {
+            logger.info("PUT data in graph");
+            result = getAddDataResult(repository, request, response, true);
+            logger.info("PUT data request finished.");
+        } else if ("DELETE".equals(reqMethod)) {
+            logger.info("DELETE data from graph");
+            result = getDeleteDataResult(repository, request, response);
+            logger.info("DELETE data request finished.");
+        } else {
+            throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED,
+                    "Method not allowed: " + reqMethod);
+        }
+        return result;
+    }
+
+    private IRI getGraphName(HttpServletRequest request, ValueFactory vf) throws ClientHTTPException {
+        String requestURL = request.getRequestURL().toString();
+        boolean isServiceRequest = requestURL.endsWith("/service");
+
+        String queryString = request.getQueryString();
+
+        if (isServiceRequest) {
+            if (!"default".equalsIgnoreCase(queryString)) {
+                IRI graph = ProtocolUtil.parseGraphParam(request, vf);
+                if (graph == null) {
+                    throw new ClientHTTPException(HttpServletResponse.SC_BAD_REQUEST,
+                            "Named or default graph expected for indirect reference request.");
+                }
+                return graph;
+            }
+            return null;
+        } else {
+            if (queryString != null) {
+                throw new ClientHTTPException(HttpServletResponse.SC_BAD_REQUEST,
+                        "No parameters expected for direct reference request.");
+            }
+            return vf.createIRI(requestURL);
+        }
+    }
+
+    /**
+     * Get all statements and export them as RDF.
+     *
+     * @return a model and view for exporting the statements.
+     */
+    private ModelAndView getExportStatementsResult(Repository repository, HttpServletRequest request,
+            HttpServletResponse response) throws ClientHTTPException {
+        ProtocolUtil.logRequestParameters(request);
+
+        ValueFactory vf = repository.getValueFactory();
+
+        IRI graph = getGraphName(request, vf);
+
+        RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(request, response,
+                RDFWriterRegistry.getInstance());
+
+        Map<String, Object> model = new HashMap<>();
+
+        model.put(ExportStatementsView.CONTEXTS_KEY, new Resource[] { graph });
+        model.put(ExportStatementsView.FACTORY_KEY, rdfWriterFactory);
+        model.put(ExportStatementsView.USE_INFERENCING_KEY, true);
+        model.put(ExportStatementsView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod()));
+        return new ModelAndView(ExportStatementsView.getInstance(), model);
+    }
+
+    /**
+     * Upload data to the graph.
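+     * <p>
+     * When {@code replaceCurrent} is {@code true} the target graph is cleared before the parsed statements are
+     * added, so the request body replaces the graph's previous contents.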
+ */ + private ModelAndView getAddDataResult(Repository repository, HttpServletRequest request, + HttpServletResponse response, boolean replaceCurrent) + throws IOException, ClientHTTPException, ServerHTTPException { + ProtocolUtil.logRequestParameters(request); + + String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); + + RDFFormat rdfFormat = Rio.getParserFormatForMIMEType(mimeType) + .orElseThrow( + () -> new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType)); + + ValueFactory vf = repository.getValueFactory(); + final IRI graph = getGraphName(request, vf); + + IRI baseURI = ProtocolUtil.parseURIParam(request, Protocol.BASEURI_PARAM_NAME, vf); + if (baseURI == null) { + baseURI = graph != null ? graph : vf.createIRI("foo:bar"); + logger.info("no base URI specified, using '{}'", baseURI); + } + + InputStream in = request.getInputStream(); + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + boolean localTransaction = !repositoryCon.isActive(); + + if (localTransaction) { + repositoryCon.begin(); + } + + if (replaceCurrent) { + repositoryCon.clear(graph); + } + repositoryCon.add(in, baseURI.stringValue(), rdfFormat, graph); + + if (localTransaction) { + repositoryCon.commit(); + } + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (UnsupportedRDFormatException e) { + throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, + "No RDF parser available for format " + rdfFormat.getName()); + } catch (RDFParseException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_DATA, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (IOException e) { + throw new ServerHTTPException("Failed to read data: " + e.getMessage(), e); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } + + /** + * Delete data from the graph. + */ + private ModelAndView getDeleteDataResult(Repository repository, HttpServletRequest request, + HttpServletResponse response) throws ClientHTTPException, ServerHTTPException { + ProtocolUtil.logRequestParameters(request); + + ValueFactory vf = repository.getValueFactory(); + + IRI graph = getGraphName(request, vf); + + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + repositoryCon.clear(graph); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/AbstractQueryRequestHandler.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/AbstractQueryRequestHandler.java new file mode 100644 index 00000000000..dc5b5cc0bb6 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/AbstractQueryRequestHandler.java @@ -0,0 +1,236 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.handler; + +import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; + +import org.eclipse.rdf4j.common.lang.FileFormat; +import org.eclipse.rdf4j.common.lang.service.FileFormatServiceRegistry; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.HTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.QueryResultView; +import org.eclipse.rdf4j.http.server.repository.resolver.RepositoryResolver; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.QueryInterruptedException; +import org.eclipse.rdf4j.query.explanation.Explanation; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * A base implementation to handle an HTTP query request. + */ +public abstract class AbstractQueryRequestHandler implements QueryRequestHandler { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + private final RepositoryResolver repositoryResolver; + + public AbstractQueryRequestHandler(RepositoryResolver repositoryResolver) { + this.repositoryResolver = repositoryResolver; + } + + @Override + public ModelAndView handleQueryRequest( + HttpServletRequest request, RequestMethod requestMethod, + HttpServletResponse response + ) throws HTTPException, IOException { + + RepositoryConnection repositoryCon = null; + Object queryResponse = null; + + try { + Repository repository = repositoryResolver.getRepository(request); + repositoryCon = repositoryResolver.getRepositoryConnection(request, repository); + + String queryString = getQueryString(request, requestMethod); + + logQuery(requestMethod, queryString); + + Query query = getQuery(request, repositoryCon, queryString); + + boolean headersOnly = requestMethod == RequestMethod.HEAD; + long limit = getLimit(request); + long offset = getOffset(request); + boolean distinct = isDistinct(request); + final Optional explainLevel = getExplain(request); + try { + if (!headersOnly) { + // explain param is present, return the query explanation + if (explainLevel.isPresent()) { + final Explanation explanation = explainQuery(query, explainLevel.get()); + return getExplainQueryResponse(request, response, explanation); + } + queryResponse = evaluateQuery(query, limit, offset, distinct); + } + + FileFormatServiceRegistry registry = getResultWriterFor(query); + if (registry == null) { + throw new UnsupportedOperationException( + "Unknown result writer for query of type: " + query.getClass().getName()); + } + + View view = getViewFor(query); + if (view == null) { + throw new UnsupportedOperationException( + "Unknown view for query of type: " + query.getClass().getName()); + } + + return 
getModelAndView(request, response, headersOnly, repositoryCon, view, queryResponse, registry); + + } catch (QueryInterruptedException e) { + logger.info("Query interrupted", e); + throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "Query evaluation took too long"); + + } catch (QueryEvaluationException e) { + logger.info("Query evaluation error", e); + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException + // directly (see SES-1016). + throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Query evaluation error: " + e.getMessage()); + } + } + + } catch (Exception e) { + // only close the response & connection when an exception occurs. Otherwise, the QueryResultView will take + // care of closing it. + try { + if (queryResponse instanceof AutoCloseable) { + ((AutoCloseable) queryResponse).close(); + } + } catch (Exception qre) { + logger.warn("Query response closing error", qre); + } finally { + try { + if (repositoryCon != null) { + repositoryCon.close(); + } + } catch (Exception qre) { + logger.warn("Connection closing error", qre); + } + } + throw e; + } + + } + + protected Explanation explainQuery(final Query query, final Explanation.Level explainLevel) + throws ServerHTTPException { + throw new ServerHTTPException("unimplemented explainQuery feature"); + } + + protected abstract ModelAndView getExplainQueryResponse( + final HttpServletRequest request, final HttpServletResponse response, final Explanation explanation); + + abstract protected Object evaluateQuery(Query query, long limit, long offset, boolean distinct) + throws ClientHTTPException; + + abstract protected View getViewFor(Query query); + + abstract protected FileFormatServiceRegistry getResultWriterFor(Query query); + + abstract protected String getQueryString(HttpServletRequest request, RequestMethod requestMethod) + throws HTTPException; + + abstract protected Query getQuery(HttpServletRequest request, RepositoryConnection repositoryCon, + String queryString) throws IOException, HTTPException; + + protected ModelAndView getModelAndView( + HttpServletRequest request, HttpServletResponse response, + boolean headersOnly, RepositoryConnection repositoryCon, View view, Object queryResult, + FileFormatServiceRegistry registry + ) throws ClientHTTPException { + Map model = new HashMap<>(); + model.put(QueryResultView.FILENAME_HINT_KEY, "query-result"); + model.put(QueryResultView.QUERY_RESULT_KEY, queryResult); + model.put(QueryResultView.FACTORY_KEY, ProtocolUtil.getAcceptableService(request, response, registry)); + model.put(QueryResultView.HEADERS_ONLY, headersOnly); + model.put(QueryResultView.CONNECTION_KEY, repositoryCon); + + return new ModelAndView(view, model); + } + + protected boolean isDistinct(HttpServletRequest request) throws ClientHTTPException { + return getParam(request, Protocol.DISTINCT_PARAM_NAME, false, Boolean.TYPE); + } + + protected long getOffset(HttpServletRequest request) throws ClientHTTPException { + return getParam(request, Protocol.OFFSET_PARAM_NAME, 0L, Long.TYPE); + } + + protected long getLimit(HttpServletRequest request) throws ClientHTTPException { + return getParam(request, Protocol.LIMIT_PARAM_NAME, 0L, Long.TYPE); + } + + protected Optional getExplain(HttpServletRequest request) throws ClientHTTPException { + final String explainString = request.getParameter(Protocol.EXPLAIN_PARAM_NAME); + if (explainString == null) { + return Optional.empty(); + } + try { + final Explanation.Level 
level = Explanation.Level.valueOf(explainString); + return Optional.of(level); + } catch (final IllegalArgumentException e) { + throw new ClientHTTPException("Invalid explanation level: " + explainString, e); + } + } + + T getParam(HttpServletRequest request, String distinctParamName, T defaultValue, Class clazz) + throws ClientHTTPException { + if (clazz == Boolean.TYPE) { + return (T) (Boolean) ProtocolUtil.parseBooleanParam(request, distinctParamName, (Boolean) defaultValue); + } + if (clazz == Long.TYPE) { + return (T) (Long) ProtocolUtil.parseLongParam(request, distinctParamName, (Long) defaultValue); + } + throw new UnsupportedOperationException("Class not supported: " + clazz); + } + + private void logQuery(RequestMethod requestMethod, String queryString) { + if (logger.isInfoEnabled() || logger.isDebugEnabled()) { + int queryHashCode = queryString.hashCode(); + + switch (requestMethod) { + case GET: + logger.info("GET query {}", queryHashCode); + break; + case HEAD: + logger.info("HEAD query {}", queryHashCode); + break; + case POST: + logger.info("POST query {}", queryHashCode); + break; + } + + logger.debug("query {} = {}", queryHashCode, queryString); + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/DefaultQueryRequestHandler.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/DefaultQueryRequestHandler.java new file mode 100644 index 00000000000..1fab77fb2b4 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/DefaultQueryRequestHandler.java @@ -0,0 +1,307 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.handler; + +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR; +import static jakarta.servlet.http.HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE; +import static org.eclipse.rdf4j.http.protocol.Protocol.BINDING_PREFIX; +import static org.eclipse.rdf4j.http.protocol.Protocol.DEFAULT_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.INCLUDE_INFERRED_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.NAMED_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_LANGUAGE_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_PARAM_NAME; + +import java.io.IOException; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.io.IOUtils; +import org.apache.http.HttpStatus; +import org.eclipse.rdf4j.common.lang.FileFormat; +import org.eclipse.rdf4j.common.lang.service.FileFormatServiceRegistry; +import org.eclipse.rdf4j.common.webapp.util.HttpServerUtil; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.protocol.error.ErrorInfo; +import org.eclipse.rdf4j.http.protocol.error.ErrorType; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.HTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.repository.*; +import org.eclipse.rdf4j.http.server.repository.resolver.RepositoryResolver; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.query.BooleanQuery; +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.QueryResults; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.UnsupportedQueryLanguageException; +import org.eclipse.rdf4j.query.explanation.Explanation; +import org.eclipse.rdf4j.query.impl.SimpleDataset; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriterRegistry; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterRegistry; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.RDFWriterRegistry; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +public class DefaultQueryRequestHandler extends AbstractQueryRequestHandler { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + public DefaultQueryRequestHandler(RepositoryResolver repositoryResolver) { + super(repositoryResolver); + } + + @Override + protected Explanation explainQuery(final Query query, final Explanation.Level level) { + return query.explain(level); + } + + 
@Override + protected ModelAndView getExplainQueryResponse( + final HttpServletRequest request, final HttpServletResponse response, + final Explanation explanation + ) { + Map model = new HashMap<>(); + model.put(QueryResultView.FILENAME_HINT_KEY, "query-result"); + model.put(QueryResultView.QUERY_EXPLAIN_RESULT_KEY, explanation); + return new ModelAndView(new ExplainQueryResultView(), model); + } + + @Override + protected Object evaluateQuery(Query query, long limit, long offset, boolean distinct) throws ClientHTTPException { + if (query instanceof TupleQuery) { + return evaluateQuery((TupleQuery) query, limit, offset, distinct); + } else if (query instanceof GraphQuery) { + return evaluateQuery((GraphQuery) query, limit, offset, distinct); + } else if (query instanceof BooleanQuery) { + return evaluateQuery((BooleanQuery) query, limit, offset, distinct); + } else { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Unsupported query type: " + query.getClass().getName()); + } + } + + protected Boolean evaluateQuery(BooleanQuery query, long limit, long offset, boolean distinct) { + return query.evaluate(); + } + + protected GraphQueryResult evaluateQuery(GraphQuery query, long limit, long offset, boolean distinct) { + GraphQueryResult qqr = distinct ? QueryResults.distinctResults(query.evaluate()) : query.evaluate(); + return QueryResults.limitResults(qqr, limit, offset); + } + + protected TupleQueryResult evaluateQuery(TupleQuery query, long limit, long offset, boolean distinct) { + TupleQueryResult tqr = distinct ? QueryResults.distinctResults(query.evaluate()) : query.evaluate(); + return QueryResults.limitResults(tqr, limit, offset); + } + + @Override + protected View getViewFor(Query query) { + if (query instanceof TupleQuery) { + return TupleQueryResultView.getInstance(); + } else if (query instanceof GraphQuery) { + return GraphQueryResultView.getInstance(); + } else if (query instanceof BooleanQuery) { + return BooleanQueryResultView.getInstance(); + } + + return null; + } + + @Override + protected FileFormatServiceRegistry getResultWriterFor(Query query) { + if (query instanceof TupleQuery) { + return TupleQueryResultWriterRegistry.getInstance(); + } else if (query instanceof GraphQuery) { + return RDFWriterRegistry.getInstance(); + } else if (query instanceof BooleanQuery) { + return BooleanQueryResultWriterRegistry.getInstance(); + } + + return null; + } + + @Override + protected String getQueryString(HttpServletRequest request, RequestMethod requestMethod) throws HTTPException { + + String queryString; + if (requestMethod == RequestMethod.POST) { + String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); + + switch (mimeType) { + case Protocol.SPARQL_QUERY_MIME_TYPE: + // The query should be the entire body + try { + queryString = IOUtils.toString(request.getReader()); + } catch (IOException e) { + throw new HTTPException(HttpStatus.SC_BAD_REQUEST, "Error reading request message body", e); + } + break; + case Protocol.FORM_MIME_TYPE: + queryString = request.getParameter(QUERY_PARAM_NAME); + break; + default: + throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType); + } + + } else { + queryString = request.getParameter(QUERY_PARAM_NAME); + } + + if (queryString == null) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Missing parameter: " + QUERY_PARAM_NAME); + } + + return queryString; + } + + @Override + protected Query getQuery(HttpServletRequest request, + RepositoryConnection repositoryCon, String queryString) throws 
IOException, HTTPException { + + QueryLanguage queryLn = getQueryLanguage(request.getParameter(QUERY_LANGUAGE_PARAM_NAME)); + String baseIRI = request.getParameter(Protocol.BASEURI_PARAM_NAME); + + try { + Query query = repositoryCon.prepareQuery(queryLn, queryString, baseIRI); + + setQueryParameters(request, repositoryCon, query); + + return query; + + } catch (UnsupportedQueryLanguageException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.UNSUPPORTED_QUERY_LANGUAGE, queryLn.getName()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (MalformedQueryException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (RepositoryException e) { + logger.error("Repository error", e); + throw new ClientHTTPException(SC_INTERNAL_SERVER_ERROR, e.getMessage()); + } + + } + + protected void setQueryParameters(HttpServletRequest request, RepositoryConnection repositoryCon, Query query) + throws ClientHTTPException { + // determine if inferred triples should be included in query evaluation + query.setIncludeInferred(getIncludeInferred(request)); + + int maxExecutionTime = getMaxExecutionTime(request); + if (maxExecutionTime > 0) { + query.setMaxExecutionTime(maxExecutionTime); + } + + Dataset dataset = getDataset(request, repositoryCon.getValueFactory(), query); + if (dataset != null) { + query.setDataset(dataset); + } + + // determine if any variable bindings have been set on this query. + Enumeration parameterNames = request.getParameterNames(); + + while (parameterNames.hasMoreElements()) { + String parameterName = parameterNames.nextElement(); + + if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { + String bindingName = parameterName.substring(BINDING_PREFIX.length()); + Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, + repositoryCon.getValueFactory()); + query.setBinding(bindingName, bindingValue); + } + } + } + + protected int getMaxExecutionTime(HttpServletRequest request) throws ClientHTTPException { + return ProtocolUtil.parseTimeoutParam(request); + } + + protected boolean getIncludeInferred(HttpServletRequest request) throws ClientHTTPException { + return getParam(request, INCLUDE_INFERRED_PARAM_NAME, true, Boolean.TYPE); + } + + protected SimpleDataset getDataset(HttpServletRequest request, ValueFactory valueFactory, Query query) + throws ClientHTTPException { + + String[] defaultGraphIRIs = request.getParameterValues(DEFAULT_GRAPH_PARAM_NAME); + String[] namedGraphIRIs = request.getParameterValues(NAMED_GRAPH_PARAM_NAME); + + if (defaultGraphIRIs == null && namedGraphIRIs == null) { + return null; + } + + SimpleDataset dataset = new SimpleDataset(); + + if (defaultGraphIRIs != null) { + for (String defaultGraphIRI : defaultGraphIRIs) { + try { + IRI iri = createIRIOrNull(valueFactory, defaultGraphIRI); + dataset.addDefaultGraph(iri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal IRI for default graph: " + defaultGraphIRI); + } + } + } + + if (namedGraphIRIs != null) { + for (String namedGraphIRI : namedGraphIRIs) { + try { + IRI iri = createIRIOrNull(valueFactory, namedGraphIRI); + dataset.addNamedGraph(iri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal IRI for named graph: " + namedGraphIRI); + } + } + } + + return dataset; + } + + protected QueryLanguage 
getQueryLanguage(String queryLanguageParamName) throws ClientHTTPException { + if (queryLanguageParamName != null) { + logger.debug("query language param = {}", queryLanguageParamName); + + QueryLanguage queryLn = QueryLanguage.valueOf(queryLanguageParamName); + if (queryLn == null) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLanguageParamName); + } + return queryLn; + } else { + return QueryLanguage.SPARQL; + } + } + + private IRI createIRIOrNull(ValueFactory valueFactory, String graphIRI) { + if ("null".equals(graphIRI)) { + return null; + } + return valueFactory.createIRI(graphIRI); + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/DefaultRepositoryRequestHandler.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/DefaultRepositoryRequestHandler.java new file mode 100644 index 00000000000..2215aed84d2 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/DefaultRepositoryRequestHandler.java @@ -0,0 +1,107 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.handler; + +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_PARAM_NAME; + +import java.io.IOException; + +import org.apache.http.HttpStatus; +import org.eclipse.rdf4j.common.exception.RDF4JException; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.http.protocol.error.ErrorInfo; +import org.eclipse.rdf4j.http.protocol.error.ErrorType; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.HTTPException; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.resolver.RepositoryResolver; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.config.RepositoryConfigException; +import org.eclipse.rdf4j.rio.Rio; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.servlet.ModelAndView; + +import jakarta.servlet.http.HttpServletRequest; + +public class DefaultRepositoryRequestHandler implements RepositoryRequestHandler { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + private final RepositoryResolver repositoryResolver; + + public DefaultRepositoryRequestHandler(RepositoryResolver repositoryResolver) { + this.repositoryResolver = repositoryResolver; + } + + public ModelAndView handleDeleteRepositoryRequest(HttpServletRequest request) throws HTTPException { + String repId = repositoryResolver.getRepositoryID(request); + logger.info("DELETE request invoked for repository '" + repId + "'"); + + if (request.getParameter(QUERY_PARAM_NAME) != null) { + logger.warn("query supplied on repository delete request, aborting delete"); + throw new HTTPException(HttpStatus.SC_BAD_REQUEST, + "Repository delete error: query supplied with request"); + } + + 
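+		// Client-side counterpart, as an illustrative sketch (the server URL is an example value):
+		//
+		//   RemoteRepositoryManager manager = new RemoteRepositoryManager("http://localhost:8080/rdf4j-server");
+		//   manager.init();
+		//   manager.removeRepository(repId); // triggers the HTTP DELETE handled by this method
+		//   manager.shutDown();
+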
try { + boolean success = repositoryResolver.getRepositoryManager().removeRepository(repId); + if (success) { + logger.info("DELETE request successfully completed"); + return new ModelAndView(EmptySuccessView.getInstance()); + } else { + logger.error("error while attempting to delete repository '" + repId + "'"); + throw new HTTPException(HttpStatus.SC_BAD_REQUEST, + "could not locate repository configuration for repository '" + repId + "'."); + } + } catch (RDF4JException e) { + logger.error("error while attempting to delete repository '" + repId + "'", e); + throw new ServerHTTPException("Repository delete error: " + e.getMessage(), e); + } + } + + public ModelAndView handleCreateOrUpdateRepositoryRequest(HttpServletRequest request) + throws IOException, HTTPException { + // create new repo + String repId = repositoryResolver.getRepositoryID(request); + logger.info("PUT request invoked for repository '" + repId + "'"); + try { + if (repositoryResolver.getRepositoryManager().hasRepositoryConfig(repId)) { + ErrorInfo errorInfo = new ErrorInfo(ErrorType.REPOSITORY_EXISTS, + "repository already exists: " + repId); + throw new ClientHTTPException(HttpStatus.SC_CONFLICT, errorInfo.toString()); + } + Model model = Rio.parse(request.getInputStream(), "", + Rio.getParserFormatForMIMEType(request.getContentType()) + .orElseThrow(() -> new HTTPException(HttpStatus.SC_BAD_REQUEST, + "unrecognized content type " + request.getContentType()))); + + RepositoryConfig config = repositoryResolver.getRepositoryConfig(repId, model); + + if (config == null) { + throw new RepositoryConfigException("could not read repository config from supplied data"); + } + repositoryResolver.getRepositoryManager().addRepositoryConfig(config); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (RepositoryConfigException e) { + ErrorInfo errorInfo = new ErrorInfo(ErrorType.MALFORMED_DATA, + "Supplied repository configuration is invalid: " + e.getMessage()); + throw new ClientHTTPException(HttpStatus.SC_BAD_REQUEST, errorInfo.toString()); + } catch (RDF4JException e) { + logger.error("error while attempting to create/configure repository '" + repId + "'", e); + throw new ServerHTTPException("Repository create error: " + e.getMessage(), e); + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/QueryRequestHandler.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/QueryRequestHandler.java new file mode 100644 index 00000000000..bf44d7de088 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/QueryRequestHandler.java @@ -0,0 +1,28 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.handler; + +import org.springframework.web.bind.annotation.RequestMethod; +import org.springframework.web.servlet.ModelAndView; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Interface used by the {@link org.eclipse.rdf4j.http.server.repository.AbstractRepositoryController} to process a + * query. + */ +public interface QueryRequestHandler { + + ModelAndView handleQueryRequest(HttpServletRequest request, RequestMethod requestMethod, + HttpServletResponse response) throws Exception; +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/RepositoryRequestHandler.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/RepositoryRequestHandler.java new file mode 100644 index 00000000000..d076aa67289 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/handler/RepositoryRequestHandler.java @@ -0,0 +1,28 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.handler; + +import org.springframework.web.servlet.ModelAndView; + +import jakarta.servlet.http.HttpServletRequest; + +/** + * An interface used by {@link org.eclipse.rdf4j.http.server.repository.AbstractRepositoryController} to process HTTP + * request for creating or deleting a repository. + */ +public interface RepositoryRequestHandler { + + ModelAndView handleCreateOrUpdateRepositoryRequest(HttpServletRequest request) throws Exception; + + ModelAndView handleDeleteRepositoryRequest(HttpServletRequest request) throws Exception; + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespaceController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespaceController.java new file mode 100644 index 00000000000..55e75c88cce --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespaceController.java @@ -0,0 +1,165 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.namespaces; + +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_NOT_FOUND; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.util.HashMap; +import java.util.Map; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.eclipse.rdf4j.common.io.IOUtil; +import org.eclipse.rdf4j.common.net.ParsedIRI; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.common.webapp.views.SimpleResponseView; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for manipulating a specific namespace definition in a repository. + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public class NamespaceController extends AbstractController { + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + public NamespaceController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_HEAD, "PUT", "DELETE"); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + String pathInfoStr = request.getPathInfo(); + String prefix = pathInfoStr.substring(pathInfoStr.lastIndexOf('/') + 1); + + String reqMethod = request.getMethod(); + + if (METHOD_HEAD.equals(reqMethod)) { + logger.info("HEAD namespace for prefix {}", prefix); + + Map model = new HashMap<>(); + return new ModelAndView(SimpleResponseView.getInstance(), model); + } + + if (METHOD_GET.equals(reqMethod)) { + logger.info("GET namespace for prefix {}", prefix); + return getExportNamespaceResult(request, prefix); + } else if ("PUT".equals(reqMethod)) { + logger.info("PUT prefix {}", prefix); + return getUpdateNamespaceResult(request, prefix); + } else if ("DELETE".equals(reqMethod)) { + logger.info("DELETE prefix {}", prefix); + return getRemoveNamespaceResult(request, prefix); + } else { + throw new ServerHTTPException("Unexpected request method: " + reqMethod); + } + } + + private ModelAndView getExportNamespaceResult(HttpServletRequest request, String prefix) + throws ServerHTTPException, ClientHTTPException { + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + String namespace = repositoryCon.getNamespace(prefix); + + if (namespace == null) { + throw new ClientHTTPException(SC_NOT_FOUND, "Undefined prefix: " + prefix); + } + + Map model = new HashMap<>(); + model.put(SimpleResponseView.CONTENT_KEY, namespace); + + return new ModelAndView(SimpleResponseView.getInstance(), model); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + } + + private 
ModelAndView getUpdateNamespaceResult(HttpServletRequest request, String prefix) + throws IOException, ClientHTTPException, ServerHTTPException { + String namespace = IOUtil.readString(request.getReader()); + namespace = namespace.trim(); + + validateUpdateNamespaceData(prefix, namespace); + + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + repositoryCon.setNamespace(prefix, namespace); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + + return new ModelAndView(EmptySuccessView.getInstance()); + } + + private ModelAndView getRemoveNamespaceResult(HttpServletRequest request, String prefix) + throws ServerHTTPException, ClientHTTPException { + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + repositoryCon.removeNamespace(prefix); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + + return new ModelAndView(EmptySuccessView.getInstance()); + } + + private void validateUpdateNamespaceData(String prefix, String namespace) throws ClientHTTPException { + if (namespace.isEmpty()) { + throw new ClientHTTPException(SC_BAD_REQUEST, "No namespace name found in request body"); + } + + if (!isValidPrefix(prefix)) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Prefix not valid"); + } + + if (!isValidNamespaceIri(namespace)) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Namespace not valid"); + } + } + + private static final String PN_CHARS_BASE = "[A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF" + + "\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD" + + "\uD800\uDC00-\uDB7F\uDFFF]"; // <- \u10000-\uEFFFF expressed with surrogate pairs + private static final String PN_CHARS_U = "(?:" + PN_CHARS_BASE + "|_)"; + private static final String PN_CHARS = "(?:" + PN_CHARS_U + "|[0-9\u0300-\u036F\u203F-\u2040\u00B7-])"; + private static final String PN_PREFIX = PN_CHARS_BASE + "(?:(?:" + PN_CHARS + "|\\.)*" + PN_CHARS + ")?"; + private static Pattern PREFIX_PATTERN = Pattern.compile(PN_PREFIX); + + private static boolean isValidPrefix(String value) { + if (value.isEmpty()) + return true; + Matcher matcher = PREFIX_PATTERN.matcher(value); + return (matcher.find() && matcher.start() == 0 && matcher.end() == value.length()); + } + + private boolean isValidNamespaceIri(String namespace) { + try { + return new ParsedIRI(namespace).isAbsolute(); + } catch (URISyntaxException e) { + logger.debug("Namespace: {} isn't parseable.", namespace, e); + return false; + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespacesController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespacesController.java new file mode 100644 index 00000000000..5252683abec --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespacesController.java @@ -0,0 +1,131 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.namespaces; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.eclipse.rdf4j.common.iteration.CloseableIteration; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.QueryResultView; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.http.server.repository.TupleQueryResultView; +import org.eclipse.rdf4j.model.Literal; +import org.eclipse.rdf4j.model.Namespace; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.impl.IteratingTupleQueryResult; +import org.eclipse.rdf4j.query.impl.ListBindingSet; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterFactory; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterRegistry; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for the list of namespace definitions for a repository. 
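+ * GET returns the declarations as a tuple result with the columns {@code prefix} and {@code namespace},
+ * HEAD returns only the corresponding response headers, and DELETE removes all namespace declarations
+ * from the repository.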
+ * + * @author Herko ter Horst + */ +public class NamespacesController extends AbstractController { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + public NamespacesController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_HEAD, "DELETE"); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + String reqMethod = request.getMethod(); + if (METHOD_GET.equals(reqMethod)) { + logger.info("GET namespace list"); + return getExportNamespacesResult(request, response); + } + if (METHOD_HEAD.equals(reqMethod)) { + logger.info("HEAD namespace list"); + return getExportNamespacesResult(request, response); + } else if ("DELETE".equals(reqMethod)) { + logger.info("DELETE namespaces"); + return getClearNamespacesResult(request, response); + } + + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "Method not allowed: " + reqMethod); + } + + private ModelAndView getExportNamespacesResult(HttpServletRequest request, HttpServletResponse response) + throws ClientHTTPException, ServerHTTPException { + final boolean headersOnly = METHOD_HEAD.equals(request.getMethod()); + + Map model = new HashMap<>(); + if (!headersOnly) { + List columnNames = Arrays.asList("prefix", "namespace"); + List namespaces = new ArrayList<>(); + + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + final ValueFactory vf = repositoryCon.getValueFactory(); + try { + try (CloseableIteration iter = repositoryCon + .getNamespaces()) { + while (iter.hasNext()) { + Namespace ns = iter.next(); + + Literal prefix = vf.createLiteral(ns.getPrefix()); + Literal namespace = vf.createLiteral(ns.getName()); + + BindingSet bindingSet = new ListBindingSet(columnNames, prefix, namespace); + namespaces.add(bindingSet); + } + } + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + } + model.put(QueryResultView.QUERY_RESULT_KEY, new IteratingTupleQueryResult(columnNames, namespaces)); + } + + TupleQueryResultWriterFactory factory = ProtocolUtil.getAcceptableService(request, response, + TupleQueryResultWriterRegistry.getInstance()); + + model.put(QueryResultView.FILENAME_HINT_KEY, "namespaces"); + model.put(QueryResultView.HEADERS_ONLY, headersOnly); + model.put(QueryResultView.FACTORY_KEY, factory); + + return new ModelAndView(TupleQueryResultView.getInstance(), model); + } + + private ModelAndView getClearNamespacesResult(HttpServletRequest request, HttpServletResponse response) + throws ServerHTTPException, ClientHTTPException { + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + try { + repositoryCon.clearNamespaces(); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + + return new ModelAndView(EmptySuccessView.getInstance()); + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/resolver/DefaultRepositoryResolver.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/resolver/DefaultRepositoryResolver.java new file mode 100644 index 00000000000..eb7dffb8fda --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/resolver/DefaultRepositoryResolver.java @@ -0,0 +1,56 @@ 
+/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.resolver; + +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.config.RepositoryConfigUtil; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.eclipse.rdf4j.rio.helpers.BasicParserSettings; + +import jakarta.servlet.http.HttpServletRequest; + +public class DefaultRepositoryResolver implements RepositoryResolver { + + private final RepositoryManager repositoryManager; + + public DefaultRepositoryResolver(RepositoryManager repMan) { + repositoryManager = repMan; + } + + public RepositoryManager getRepositoryManager() { + return repositoryManager; + } + + public RepositoryConfig getRepositoryConfig(String repId, Model model) { + return RepositoryConfigUtil.getRepositoryConfig(model, repId); + } + + public String getRepositoryID(HttpServletRequest request) { + return RepositoryInterceptor.getRepositoryID(request); + } + + public RepositoryConnection getRepositoryConnection(HttpServletRequest request, Repository repository) { + RepositoryConnection conn = repository.getConnection(); + conn.getParserConfig().addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES); + conn.getParserConfig().addNonFatalError(BasicParserSettings.VERIFY_LANGUAGE_TAGS); + return conn; + } + + public Repository getRepository(HttpServletRequest request) { + return RepositoryInterceptor.getRepository(request); + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/resolver/RepositoryResolver.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/resolver/RepositoryResolver.java new file mode 100644 index 00000000000..2249247d8a0 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/resolver/RepositoryResolver.java @@ -0,0 +1,39 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.resolver; + +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; + +import jakarta.servlet.http.HttpServletRequest; + +/** + * An adapter interface to allow the {@link org.eclipse.rdf4j.http.server.repository.handler.QueryRequestHandler}, + * {@link org.eclipse.rdf4j.http.server.repository.handler.RepositoryRequestHandler} and + * {@link org.eclipse.rdf4j.http.server.repository.RepositoryController} to get the repository for an HttpRequest. + */ +public interface RepositoryResolver { + + RepositoryManager getRepositoryManager(); + + String getRepositoryID(HttpServletRequest request); + + RepositoryConfig getRepositoryConfig(String repId, Model model); + + RepositoryConnection getRepositoryConnection(HttpServletRequest request, Repository repository); + + Repository getRepository(HttpServletRequest request); + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/size/SizeController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/size/SizeController.java new file mode 100644 index 00000000000..394fb7c7bcb --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/size/SizeController.java @@ -0,0 +1,69 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.size; + +import java.util.HashMap; +import java.util.Map; + +import org.eclipse.rdf4j.common.webapp.views.SimpleResponseView; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for the size of (set of contexts in) a repository. 
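+ * The size is returned as the plain-text body of the response; the optional {@code context} parameter
+ * restricts the count to the given contexts.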
+ * + * @author Herko ter Horst + */ +public class SizeController extends AbstractController { + + public SizeController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_HEAD); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + ProtocolUtil.logRequestParameters(request); + + Map model = new HashMap<>(); + final boolean headersOnly = METHOD_HEAD.equals(request.getMethod()); + + if (!headersOnly) { + Repository repository = RepositoryInterceptor.getRepository(request); + + ValueFactory vf = repository.getValueFactory(); + Resource[] contexts = ProtocolUtil.parseContextParam(request, Protocol.CONTEXT_PARAM_NAME, vf); + + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + long size = repositoryCon.size(contexts); + model.put(SimpleResponseView.CONTENT_KEY, String.valueOf(size)); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + + } + + return new ModelAndView(SimpleResponseView.getInstance(), model); + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/statements/ExportStatementsView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/statements/ExportStatementsView.java new file mode 100644 index 00000000000..475d2d5b39a --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/statements/ExportStatementsView.java @@ -0,0 +1,249 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.statements; + +import static jakarta.servlet.http.HttpServletResponse.SC_OK; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.charset.Charset; +import java.util.Map; +import java.util.Objects; + +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFHandler; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.RDFWriter; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Streams statements as RDF in the format requested by the client. 
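+ * The statement pattern, the {@link RDFWriterFactory} to use and the headers-only flag are passed in
+ * through the model map under the {@code *_KEY} constants defined below.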
+ * + * @author Herko ter Horst + */ +public class ExportStatementsView implements View { + + private static final Logger logger = LoggerFactory.getLogger(ExportStatementsView.class); + + public static final String SUBJECT_KEY = "subject"; + public static final String PREDICATE_KEY = "predicate"; + public static final String OBJECT_KEY = "object"; + public static final String CONTEXTS_KEY = "contexts"; + public static final String USE_INFERENCING_KEY = "useInferencing"; + public static final String CONNECTION_KEY = "connection"; + public static final String TRANSACTION_ID_KEY = "transactionID"; + public static final String FACTORY_KEY = "factory"; + public static final String HEADERS_ONLY = "headersOnly"; + + private static final ExportStatementsView INSTANCE = new ExportStatementsView(); + public static int MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS; + + static { + int max = 1024; // default value + String maxStatements = System.getProperty( + "org.eclipse.rdf4j.http.server.repository.statements.ExportStatementsView.MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS"); + if (maxStatements != null) { + try { + int userMax = Integer.parseInt(maxStatements); + if (userMax >= -1) { + max = userMax; + } else { + logger.warn( + "Invalid value for MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS: {}, must be >= -1, using default value of {}.", + maxStatements, max); + } + } catch (NumberFormatException e) { + logger.warn("Invalid value for MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS: " + + maxStatements, e); + } + } + MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS = max; + } + + public static ExportStatementsView getInstance() { + return INSTANCE; + } + + private ExportStatementsView() { + } + + @Override + public String getContentType() { + // Spring ignores this for View implementations; we set it in render(). + return null; + } + + @Override + public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws Exception { + + response.setBufferSize(1024 * 1024); // 1MB + + Resource subj = (Resource) Objects.requireNonNull(model, "model should not be null").get(SUBJECT_KEY); + IRI pred = (IRI) model.get(PREDICATE_KEY); + Value obj = (Value) model.get(OBJECT_KEY); + Resource[] contexts = (Resource[]) model.get(CONTEXTS_KEY); + boolean useInferencing = Boolean.TRUE.equals(model.get(USE_INFERENCING_KEY)); + boolean headersOnly = Boolean.TRUE.equals(model.get(HEADERS_ONLY)); + + RDFWriterFactory factory = (RDFWriterFactory) model.get(FACTORY_KEY); + RDFFormat rdfFormat = factory.getRDFFormat(); + + attemptToDetectExceptions(request, factory, headersOnly, subj, pred, obj, useInferencing, contexts); + + response.setStatus(SC_OK); + + String mimeType = rdfFormat.getDefaultMIMEType(); + if (rdfFormat.hasCharset()) { + Charset charset = rdfFormat.getCharset(); + mimeType += "; charset=" + charset.name(); + } + response.setContentType(mimeType); + + String filename = "statements"; + if (rdfFormat.getDefaultFileExtension() != null) { + filename += "." 
+ rdfFormat.getDefaultFileExtension(); + } + response.setHeader("Content-Disposition", "attachment; filename=" + filename); + + if (headersOnly) { + response.setContentLength(0); + response.flushBuffer(); + return; + } + + try (OutputStream out = response.getOutputStream()) { + RDFWriter writer = factory.getWriter(out); + try (RepositoryConnection conn = RepositoryInterceptor.getRepositoryConnection(request)) { + conn.exportStatements(subj, pred, obj, useInferencing, writer, contexts); + out.flush(); + response.flushBuffer(); + } catch (RDFHandlerException e) { + var serverHTTPException = new ServerHTTPException("Serialization error: " + e.getMessage(), e); + if (!response.isCommitted()) { + response.reset(); + } + throw serverHTTPException; + } catch (RepositoryException e) { + var serverHTTPException = new ServerHTTPException("Repository error: " + e.getMessage(), e); + if (!response.isCommitted()) { + response.reset(); + } + throw serverHTTPException; + } catch (Throwable e) { + if (!response.isCommitted()) { + response.reset(); + } + throw e; + } + + } + + } + + private static void attemptToDetectExceptions(HttpServletRequest request, RDFWriterFactory rdfWriterFactory, + boolean headersOnly, Resource subj, IRI pred, Value obj, boolean useInferencing, Resource[] contexts) + throws IOException, ServerHTTPException { + if (MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS == 0) { + return; + } + + try (OutputStream out = OutputStream.nullOutputStream()) { + RDFHandler rdfWriter = new LimitedSizeRDFHandler(rdfWriterFactory.getWriter(out), + MAX_NUMBER_OF_STATEMENTS_WHEN_TESTING_FOR_POSSIBLE_EXCEPTIONS); + if (!headersOnly) { + try (RepositoryConnection conn = RepositoryInterceptor.getRepositoryConnection(request)) { + conn.exportStatements(subj, pred, obj, useInferencing, rdfWriter, contexts); + } catch (RDFHandlerException e) { + throw new ServerHTTPException("Serialization error: " + e.getMessage(), e); + } catch (RepositoryException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } catch (ClientHTTPException e) { + throw new ServerHTTPException("Client error: " + e.getMessage(), e); + } catch (LimitedSizeReachedException ignored) { + } + } + } + } + + private static class LimitedSizeRDFHandler implements RDFHandler { + + private final RDFHandler delegate; + private final long maxSize; + private long currentSize = 0; + + public LimitedSizeRDFHandler(RDFHandler delegate, long maxSize) { + this.delegate = delegate; + this.maxSize = maxSize; + } + + @Override + public void startRDF() throws RDFHandlerException { + delegate.startRDF(); + } + + @Override + public void endRDF() throws RDFHandlerException { + delegate.endRDF(); + } + + @Override + public void handleNamespace(String prefix, String uri) throws RDFHandlerException { + delegate.handleNamespace(prefix, uri); + incrementCurrentSize(); + } + + @Override + public void handleStatement(Statement st) throws RDFHandlerException { + delegate.handleStatement(st); + incrementCurrentSize(); + } + + @Override + public void handleComment(String comment) throws RDFHandlerException { + delegate.handleComment(comment); + incrementCurrentSize(); + } + + private void incrementCurrentSize() { + currentSize++; + if (maxSize >= 0 && currentSize > maxSize) { + endRDF(); + logger.trace( + "Limited size reached, throwing LimitedSizeReachedException to signal that we are done testing the export of statements for exceptions."); + throw new LimitedSizeReachedException(); + } + } + } + + private static class 
LimitedSizeReachedException extends RuntimeException { + @Override + public Throwable fillInStackTrace() { + // Do not fill in the stack trace to avoid performance overhead + return this; + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/statements/StatementsController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/statements/StatementsController.java new file mode 100644 index 00000000000..110643f721b --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/statements/StatementsController.java @@ -0,0 +1,458 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.statements; + +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; +import static jakarta.servlet.http.HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE; +import static org.eclipse.rdf4j.http.protocol.Protocol.BASEURI_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.BINDING_PREFIX; +import static org.eclipse.rdf4j.http.protocol.Protocol.CONTEXT_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.INCLUDE_INFERRED_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.INSERT_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.OBJECT_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.PREDICATE_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_LANGUAGE_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.REMOVE_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.SUBJECT_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.USING_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.USING_NAMED_GRAPH_PARAM_NAME; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.io.IOUtils; +import org.eclipse.rdf4j.common.webapp.util.HttpServerUtil; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.protocol.error.ErrorInfo; +import org.eclipse.rdf4j.http.protocol.error.ErrorType; +import org.eclipse.rdf4j.http.protocol.transaction.TransactionReader; +import org.eclipse.rdf4j.http.protocol.transaction.operations.TransactionOperation; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.HTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.ValueFactory; +import 
org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.QueryInterruptedException; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.query.UpdateExecutionException; +import org.eclipse.rdf4j.query.impl.SimpleDataset; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFParseException; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.eclipse.rdf4j.rio.RDFWriterRegistry; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.rio.UnsupportedRDFormatException; +import org.eclipse.rdf4j.rio.helpers.BasicParserSettings; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; +import org.xml.sax.SAXException; +import org.xml.sax.SAXParseException; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for manipulating the statements in a repository. + * + * @author Herko ter Horst + * @author Arjohn Kampman + */ +public class StatementsController extends AbstractController { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + public StatementsController() throws ApplicationContextException { + setSupportedMethods(METHOD_GET, METHOD_POST, METHOD_HEAD, "PUT", "DELETE"); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + ModelAndView result; + + Repository repository = RepositoryInterceptor.getRepository(request); + + String reqMethod = request.getMethod(); + + if (METHOD_GET.equals(reqMethod)) { + logger.info("GET statements"); + result = getExportStatementsResult(repository, request, response); + } else if (METHOD_HEAD.equals(reqMethod)) { + logger.info("HEAD statements"); + result = getExportStatementsResult(repository, request, response); + } else if (METHOD_POST.equals(reqMethod)) { + String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); + + if (Protocol.TXN_MIME_TYPE.equals(mimeType)) { + logger.info("POST transaction to repository"); + result = getTransactionResultResult(repository, request, response); + } else if (Protocol.SPARQL_UPDATE_MIME_TYPE.equals(mimeType) + || request.getParameterMap().containsKey(Protocol.UPDATE_PARAM_NAME)) { + logger.info("POST SPARQL update request to repository"); + result = getSparqlUpdateResult(repository, request, response); + } else { + logger.info("POST data to repository"); + result = getAddDataResult(repository, request, response, false); + } + } else if ("PUT".equals(reqMethod)) { + logger.info("PUT data in repository"); + result = getAddDataResult(repository, request, response, true); + } else if ("DELETE".equals(reqMethod)) { + logger.info("DELETE data from repository"); + result = getDeleteDataResult(repository, request, response); + } else { + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + "Method not allowed: " + reqMethod); + } + + return result; + } + + private ModelAndView getSparqlUpdateResult(Repository repository, HttpServletRequest request, + HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { 
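+		// Accepted request forms (mirroring the checks below): a request body whose MIME type is the
+		// SPARQL update type and whose content is the update string itself, e.g.
+		//     INSERT DATA { <http://example.org/s> <http://example.org/p> "o" }
+		// or a form-encoded request carrying the update string in the 'update' parameter.
+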
+ ProtocolUtil.logRequestParameters(request); + + String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); + + String sparqlUpdateString; + if (Protocol.SPARQL_UPDATE_MIME_TYPE.equals(mimeType)) { + // The query should be the entire body + try { + sparqlUpdateString = IOUtils.toString(request.getReader()); + } catch (IOException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Error reading request message body", e); + } + } else { + sparqlUpdateString = request.getParameterValues(Protocol.UPDATE_PARAM_NAME)[0]; + } + + if (sparqlUpdateString.isEmpty()) { + throw new ClientHTTPException("Updates must be non-empty"); + } + + if (logger.isDebugEnabled()) { + final int queryHashCode = sparqlUpdateString.hashCode(); + logger.debug("update query {} = {}", queryHashCode, sparqlUpdateString); + } + + // default query language is SPARQL + QueryLanguage queryLn = QueryLanguage.SPARQL; + + String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); + logger.debug("query language param = {}", queryLnStr); + + if (queryLnStr != null) { + queryLn = QueryLanguage.valueOf(queryLnStr); + + if (queryLn == null) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); + } + } + + String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); + + // determine if inferred triples should be included in query evaluation + boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); + + // build a dataset, if specified + String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); + String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); + String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); + String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); + + SimpleDataset dataset = null; + if (defaultRemoveGraphURIs != null || defaultInsertGraphURIs != null || defaultGraphURIs != null + || namedGraphURIs != null) { + dataset = new SimpleDataset(); + } + + if (defaultRemoveGraphURIs != null) { + for (String graphURI : defaultRemoveGraphURIs) { + try { + IRI uri = createURIOrNull(repository, graphURI); + dataset.addDefaultRemoveGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); + } + } + } + + if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { + String graphURI = defaultInsertGraphURIs[0]; + try { + IRI uri = createURIOrNull(repository, graphURI); + dataset.setDefaultInsertGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); + } + } + + if (defaultGraphURIs != null) { + for (String defaultGraphURI : defaultGraphURIs) { + try { + IRI uri = createURIOrNull(repository, defaultGraphURI); + dataset.addDefaultGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); + } + } + } + + if (namedGraphURIs != null) { + for (String namedGraphURI : namedGraphURIs) { + try { + IRI uri = createURIOrNull(repository, namedGraphURI); + dataset.addNamedGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); + } + } + } + + final int maxQueryTime = ProtocolUtil.parseTimeoutParam(request); + try 
(RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + Update update = repositoryCon.prepareUpdate(queryLn, sparqlUpdateString, baseURI); + + update.setIncludeInferred(includeInferred); + update.setMaxExecutionTime(maxQueryTime); + + if (dataset != null) { + update.setDataset(dataset); + } + + // determine if any variable bindings have been set on this + // update. + @SuppressWarnings("unchecked") + Enumeration parameterNames = request.getParameterNames(); + + while (parameterNames.hasMoreElements()) { + String parameterName = parameterNames.nextElement(); + + if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { + String bindingName = parameterName.substring(BINDING_PREFIX.length()); + Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, + repository.getValueFactory()); + update.setBinding(bindingName, bindingValue); + } + } + + update.execute(); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (QueryInterruptedException e) { + throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "update execution took too long"); + } catch (UpdateExecutionException | RepositoryException e) { + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException + // directly + // (see SES-1016). + throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } + // custom signal from the backend, throw as HTTPException + // directly + // (see SES-1016). + catch (MalformedQueryException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } + } + + private IRI createURIOrNull(Repository repository, String graphURI) { + if ("null".equals(graphURI)) { + return null; + } + return repository.getValueFactory().createIRI(graphURI); + } + + /** + * Get all statements and export them as RDF. + * + * @return a model and view for exporting the statements. 
+ */ + private ModelAndView getExportStatementsResult(Repository repository, HttpServletRequest request, + HttpServletResponse response) throws ClientHTTPException { + ProtocolUtil.logRequestParameters(request); + + ValueFactory vf = repository.getValueFactory(); + + Resource subj = ProtocolUtil.parseResourceParam(request, SUBJECT_PARAM_NAME, vf); + IRI pred = ProtocolUtil.parseURIParam(request, PREDICATE_PARAM_NAME, vf); + Value obj = ProtocolUtil.parseValueParam(request, OBJECT_PARAM_NAME, vf); + Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, vf); + boolean useInferencing = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); + + RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(request, response, + RDFWriterRegistry.getInstance()); + + Map model = new HashMap<>(); + model.put(ExportStatementsView.SUBJECT_KEY, subj); + model.put(ExportStatementsView.PREDICATE_KEY, pred); + model.put(ExportStatementsView.OBJECT_KEY, obj); + model.put(ExportStatementsView.CONTEXTS_KEY, contexts); + model.put(ExportStatementsView.USE_INFERENCING_KEY, Boolean.valueOf(useInferencing)); + model.put(ExportStatementsView.FACTORY_KEY, rdfWriterFactory); + model.put(ExportStatementsView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod())); + return new ModelAndView(ExportStatementsView.getInstance(), model); + } + + /** + * Process several actions as a transaction. + */ + private ModelAndView getTransactionResultResult(Repository repository, HttpServletRequest request, + HttpServletResponse response) throws IOException, ClientHTTPException, ServerHTTPException, HTTPException { + InputStream in = request.getInputStream(); + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + logger.debug("Processing transaction..."); + + TransactionReader reader = new TransactionReader(); + Iterable txn = reader.parse(in); + + repositoryCon.begin(); + + for (TransactionOperation op : txn) { + op.execute(repositoryCon); + } + + repositoryCon.commit(); + logger.debug("Transaction processed "); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (SAXParseException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_DATA, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (SAXException e) { + throw new ServerHTTPException("Failed to parse transaction data: " + e.getMessage(), e); + } catch (IOException e) { + throw new ServerHTTPException("Failed to read data: " + e.getMessage(), e); + } catch (RepositoryException e) { + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException + // directly + // (see SES-1016). + throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } + } + + /** + * Upload data to the repository. 
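The export handler above maps the `subj`/`pred`/`obj`/`context` parameters onto a plain statement export. Through the RDF4J API the equivalent call looks roughly like the sketch below; the `ExportSketch` class, the already opened connection and the rdf:type filter are illustrative assumptions.

```java
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;

class ExportSketch {
	/** Dump all rdf:type statements from the given connection as Turtle to stdout. */
	static void exportTypes(RepositoryConnection conn) {
		// null subject/object act as wildcards, like omitting the subj/obj parameters;
		// 'true' matches the include-inferred default used by the handler above.
		conn.exportStatements(null, RDF.TYPE, null, true,
				Rio.createWriter(RDFFormat.TURTLE, System.out));
	}
}
```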
+ */ + private ModelAndView getAddDataResult(Repository repository, HttpServletRequest request, + HttpServletResponse response, boolean replaceCurrent) + throws IOException, ServerHTTPException, ClientHTTPException, HTTPException { + ProtocolUtil.logRequestParameters(request); + + String mimeType = HttpServerUtil.getMIMEType(request.getContentType()); + + RDFFormat rdfFormat = Rio.getParserFormatForMIMEType(mimeType) + .orElseThrow( + () -> new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, "Unsupported MIME type: " + mimeType)); + + ValueFactory vf = repository.getValueFactory(); + + Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, vf); + IRI baseURI = ProtocolUtil.parseURIParam(request, BASEURI_PARAM_NAME, vf); + final boolean preserveNodeIds = ProtocolUtil.parseBooleanParam(request, Protocol.PRESERVE_BNODE_ID_PARAM_NAME, + false); + + String baseURIString = null; + if (baseURI != null) { + baseURIString = baseURI.toString(); + } + + InputStream in = request.getInputStream(); + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + repositoryCon.begin(); + + if (preserveNodeIds) { + repositoryCon.getParserConfig().set(BasicParserSettings.PRESERVE_BNODE_IDS, true); + } + + if (replaceCurrent) { + repositoryCon.clear(contexts); + } + repositoryCon.add(in, baseURIString, rdfFormat, contexts); + + repositoryCon.commit(); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (UnsupportedRDFormatException e) { + throw new ClientHTTPException(SC_UNSUPPORTED_MEDIA_TYPE, + "No RDF parser available for format " + rdfFormat.getName()); + } catch (RDFParseException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_DATA, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (IOException e) { + throw new ServerHTTPException("Failed to read data: " + e.getMessage(), e); + } catch (RepositoryException e) { + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException + // directly + // (see SES-1016). + throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } + } + + /** + * Delete data from the repository. + */ + private ModelAndView getDeleteDataResult(Repository repository, HttpServletRequest request, + HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { + ProtocolUtil.logRequestParameters(request); + + ValueFactory vf = repository.getValueFactory(); + + Resource subj = ProtocolUtil.parseResourceParam(request, SUBJECT_PARAM_NAME, vf); + IRI pred = ProtocolUtil.parseURIParam(request, PREDICATE_PARAM_NAME, vf); + Value obj = ProtocolUtil.parseValueParam(request, OBJECT_PARAM_NAME, vf); + Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, vf); + + try (RepositoryConnection repositoryCon = RepositoryInterceptor.getRepositoryConnection(request)) { + repositoryCon.remove(subj, pred, obj, contexts); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (RepositoryException e) { + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException + // directly + // (see SES-1016). 
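The data handler above wraps the upload in a single transaction, clearing the target contexts first when `replaceCurrent` is set (PUT semantics). A rough client-side equivalent, with a placeholder context IRI, base URI and class name:

```java
import java.io.File;
import java.io.IOException;

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.rio.RDFFormat;

class ReplaceContextSketch {
	/** Replace the contents of one named graph with the contents of a Turtle file. */
	static void replace(RepositoryConnection conn, File data) throws IOException {
		IRI ctx = conn.getValueFactory().createIRI("http://example.org/graph/demo"); // placeholder
		conn.begin();
		conn.clear(ctx); // PUT semantics: drop the current content first
		conn.add(data, "http://example.org/base/", RDFFormat.TURTLE, ctx);
		conn.commit();
	}
}
```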
+ throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/ActiveTransactionRegistry.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/ActiveTransactionRegistry.java new file mode 100644 index 00000000000..4a0a824c3ea --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/ActiveTransactionRegistry.java @@ -0,0 +1,247 @@ +/******************************************************************************* + * Copyright (c) 2016 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.ScheduledFuture; +import java.util.concurrent.TimeUnit; + +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.cache.Cache; +import com.google.common.cache.CacheBuilder; +import com.google.common.cache.RemovalCause; +import com.google.common.cache.RemovalNotification; + +/** + * Registry keeping track of active transactions identified by a {@link UUID}. + * + * @author Jeen Broekstra + */ +public enum ActiveTransactionRegistry { + + INSTANCE; + + private int timeout = DEFAULT_TIMEOUT; + + private final Logger logger = LoggerFactory.getLogger(ActiveTransactionRegistry.class); + + /** + * Configurable system property {@code rdf4j.server.txn.registry.timeout} for specifying the transaction cache + * timeout (in seconds). + * + * @deprecated Use {@link Protocol#CACHE_TIMEOUT_PROPERTY} + */ + @Deprecated(since = "2.3") + public static final String CACHE_TIMEOUT_PROPERTY = Protocol.TIMEOUT.CACHE_PROPERTY; + + /** + * Default timeout setting for transaction cache entries (in seconds). + * + * @deprecated Use {@link Protocol#DEFAULT_TIMEOUT} + */ + @Deprecated(since = "2.3") + public final static int DEFAULT_TIMEOUT = Protocol.TIMEOUT.DEFAULT; + + /** + * primary cache for transactions, accessible via transaction ID. Cache entries are kept until a transaction signals + * it has ended, or until the secondary cache finds an "orphaned" transaction entry. + */ + private final Cache primaryCache; + + /** + * The secondary cache does automatic cleanup of its entries based on the configured timeout. If an expired + * transaction is no longer active, it is considered "orphaned" and discarded from the primary cache. 
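The registry described in the Javadoc above pairs a manually managed primary cache with a Guava cache whose access-based expiry flags orphaned transactions. The isolated mechanism is sketched below; the String key/value types, the 30-second timeout and the class name are illustration-only assumptions.

```java
import java.util.concurrent.TimeUnit;

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.RemovalCause;
import com.google.common.cache.RemovalNotification;

class ExpiryCacheSketch {
	public static void main(String[] args) {
		Cache<String, String> cache = CacheBuilder.newBuilder()
				.expireAfterAccess(30, TimeUnit.SECONDS) // plays the role of the secondary cache timeout
				.removalListener((RemovalNotification<String, String> n) -> {
					if (RemovalCause.EXPIRED.equals(n.getCause())) {
						// the registry would treat n.getKey() as an orphaned transaction here
						System.out.println("expired: " + n.getKey());
					}
				})
				.build();
		cache.put("txn-1", "some transaction state");
		// Expired entries are only evicted lazily, on access or cleanUp(), which is
		// why the registry also schedules periodic cleanUp() calls.
		cache.cleanUp();
	}
}
```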
+ */ + private final Cache secondaryCache; + + /** + * a scheduler that routinely cleanup the secondary cache there is no other way to remove stale transactions from + * there if remote clients are gone + */ + private final ScheduledExecutorService cleaupSecondaryCacheScheduler; + private ScheduledFuture cleanupTask; + + private Cache getSecondaryCache() { + return secondaryCache; + } + + /** + * private constructor. + */ + ActiveTransactionRegistry() { + final String configuredValue = System.getProperty(Protocol.CACHE_TIMEOUT_PROPERTY); + if (configuredValue != null) { + try { + timeout = Integer.parseInt(configuredValue); + } catch (NumberFormatException e) { + logger.warn("Expected integer value for property {}. Timeout will default to {} seconds. ", + Protocol.CACHE_TIMEOUT_PROPERTY, Protocol.DEFAULT_TIMEOUT); + } + } + primaryCache = CacheBuilder.newBuilder() + .removalListener((RemovalNotification notification) -> { + UUID transactionId = notification.getKey(); + Transaction entry = notification.getValue(); + try { + logger.debug("primary cache removal txid {}", transactionId); + entry.close(); + } catch (RepositoryException | InterruptedException | ExecutionException e) { + // fall through + } + }) + .build(); + + secondaryCache = CacheBuilder.newBuilder() + .removalListener((RemovalNotification notification) -> { + logger.debug("secondary cache removal"); + if (RemovalCause.EXPIRED.equals(notification.getCause())) { + final UUID transactionId = notification.getKey(); + final Transaction entry = notification.getValue(); + logger.debug("expired transaction to be removed {}", transactionId); + synchronized (primaryCache) { + // no operation active, we can decommission this entry + primaryCache.invalidate(transactionId); + logger.debug("deregistered expired transaction {}", transactionId); + try { + logger.debug("try close() invoked on transaction !!!{}", transactionId); + entry.close(); + } catch (Throwable t) { + logger.debug("error on close when purging {}", t.getMessage()); + } + } + } + }) + .expireAfterAccess(timeout, TimeUnit.SECONDS) + .build(); + cleaupSecondaryCacheScheduler = Executors.newSingleThreadScheduledExecutor(( + + Runnable runnable) -> { + Thread thread = Executors.defaultThreadFactory().newThread(runnable); + thread.setName("rdf4j-cleanup-stn-scheduler"); + thread.setDaemon(true); + return thread; + }); + + // timeout + 10% to force cleanup + cleanupTask = cleaupSecondaryCacheScheduler.schedule(() -> { + cleanUpSecondaryCache(); + }, timeout + timeout / 10, TimeUnit.SECONDS); + logger.debug("secondary cache expire time {} seconds", timeout); + } + + protected void cleanUpSecondaryCache() { + synchronized (primaryCache) { + logger.debug("performing secondary cache cleanup. {}", getSecondaryCache().size()); + getSecondaryCache().cleanUp(); + } + cleanupTask = cleaupSecondaryCacheScheduler.schedule(() -> { + cleanUpSecondaryCache(); + }, timeout + timeout / 10, TimeUnit.SECONDS); + } + + // stops the secondary cache cleanup scheduler. 
invoked by TransactionController.destroy() + public void destroyScheduler() { + if (cleanupTask != null) + cleanupTask.cancel(false); + cleanupTask = null; + cleaupSecondaryCacheScheduler.shutdownNow(); + logger.debug("ActiveTransactionCache destroy invoked!"); + } + + public long getTimeout(TimeUnit unit) { + return unit.convert(timeout, TimeUnit.SECONDS); + } + + /** + * @param txn + */ + public void register(Transaction txn) { + synchronized (primaryCache) { + Transaction existingTxn = primaryCache.getIfPresent(txn.getID()); + if (existingTxn == null) { + primaryCache.put(txn.getID(), txn); + secondaryCache.put(txn.getID(), txn); + logger.debug("registered transaction {} ", txn.getID()); + } else { + logger.error("transaction already registered: {}", txn.getID()); + throw new RepositoryException("transaction with id " + txn.getID().toString() + " already registered."); + } + } + } + + public Transaction getTransaction(UUID id) { + synchronized (primaryCache) { + Transaction entry = primaryCache.getIfPresent(id); + if (entry == null) { + throw new RepositoryException("transaction with id " + id.toString() + " not registered."); + } + updateSecondaryCache(entry); + return entry; + } + } + + /** + * Resets transaction timeout. If transaction has already timed-out, reinsert the transaction. + * + * @param txn + */ + public void active(Transaction txn) { + synchronized (primaryCache) { + updateSecondaryCache(txn); + Transaction existingTxn = primaryCache.getIfPresent(txn.getID()); + if (existingTxn == null) { + // reinstate transaction that timed-out too soon + primaryCache.put(txn.getID(), txn); + logger.debug("reinstated transaction {} ", txn.getID()); + } + } + } + + /** + * @param transaction + */ + public void deregister(Transaction transaction) { + + synchronized (primaryCache) { + Transaction entry = primaryCache.getIfPresent(transaction.getID()); + if (entry == null) { + throw new RepositoryException( + "transaction with id " + transaction.getID().toString() + " not registered."); + } else { + primaryCache.invalidate(transaction.getID()); + secondaryCache.invalidate(transaction.getID()); + logger.debug("deregistered transaction {}", transaction.getID()); + } + } + } + + /** + * Checks if the given transaction entry is still in the secondary cache (resetting its last access time in the + * process) and if not reinserts it. + * + * @param transaction the transaction to check + */ + private void updateSecondaryCache(final Transaction transaction) { + try { + secondaryCache.get(transaction.getID(), () -> transaction); + logger.debug("secondary cache update transaction {}", transaction.getID()); + } catch (ExecutionException e) { + throw new RuntimeException(e); + } + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/Transaction.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/Transaction.java new file mode 100644 index 00000000000..92e7ccf8303 --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/Transaction.java @@ -0,0 +1,506 @@ +/******************************************************************************* + * Copyright (c) 2016 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
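Because the registry is an enum singleton, its timeout is read exactly once, from the system property quoted in the Javadoc further up, when `INSTANCE` is first initialized. A hedged configuration sketch follows; the 300-second value and the class name are arbitrary, and it assumes `Protocol.CACHE_TIMEOUT_PROPERTY` resolves to that same property name.

```java
import java.util.concurrent.TimeUnit;

import org.eclipse.rdf4j.http.server.repository.transaction.ActiveTransactionRegistry;

class RegistryTimeoutSketch {
	public static void main(String[] args) {
		// Must be set before ActiveTransactionRegistry.INSTANCE is first touched,
		// because the constructor reads the property only once.
		System.setProperty("rdf4j.server.txn.registry.timeout", "300"); // seconds, arbitrary value
		long millis = ActiveTransactionRegistry.INSTANCE.getTimeout(TimeUnit.MILLISECONDS);
		System.out.println("transaction cache timeout: " + millis + " ms");
	}
}
```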
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; + +import org.eclipse.rdf4j.common.transaction.IsolationLevel; +import org.eclipse.rdf4j.common.transaction.TransactionSetting; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Statement; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.vocabulary.RDF4J; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.query.BooleanQuery; +import org.eclipse.rdf4j.query.Dataset; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.GraphQueryResult; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.Update; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.repository.util.RDFInserter; +import org.eclipse.rdf4j.rio.ParserConfig; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.RDFParser; +import org.eclipse.rdf4j.rio.RDFWriter; +import org.eclipse.rdf4j.rio.Rio; +import org.eclipse.rdf4j.rio.helpers.AbstractRDFHandler; +import org.eclipse.rdf4j.rio.helpers.BasicParserSettings; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; + +/** + * A transaction encapsulates a single {@link Thread} and a {@link RepositoryConnection}, to enable executing all + * operations that are part of the transaction from a single, dedicated thread. This is necessary because + * {@link RepositoryConnection} is not guaranteed thread-safe and we may run into concurrency issues if we attempt to + * share it between the various HTTP Request worker threads. + * + * @author Jeen Broekstra + */ +class Transaction implements AutoCloseable { + + private static final Logger logger = LoggerFactory.getLogger(Transaction.class); + + /** + * Set to true when entering the {@link #close()} method for the first time, to ensure that only a single thread + * executes the close operations. + */ + private final AtomicBoolean isClosed = new AtomicBoolean(false); + + /** + * Set to true when the {@link #close()} method is about to complete for the first invocation. + */ + private final AtomicBoolean closeCompleted = new AtomicBoolean(false); + + private final UUID id; + + private final Repository rep; + + private final RepositoryConnection txnConnection; + + /** + * The {@link ExecutorService} that performs all of the operations related to this Transaction. 
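The class Javadoc above states the design constraint this class exists for: `RepositoryConnection` is not guaranteed thread-safe, so every operation is funnelled through one dedicated thread while the calling HTTP worker blocks on a `Future`. Stripped of RDF4J specifics, the pattern reduces to the sketch below; `ConfinedResource` and its `Function`-based API are inventions for illustration, not part of this change.

```java
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Function;

/** Illustrative sketch of the single-thread confinement pattern used by Transaction. */
class ConfinedResource<R> implements AutoCloseable {

	private final ExecutorService executor = Executors.newSingleThreadExecutor();
	private final AtomicInteger activeOps = new AtomicInteger();
	private final R resource;

	ConfinedResource(R resource) {
		this.resource = resource;
	}

	/** Run an operation against the confined resource on the dedicated thread and wait for it. */
	<T> T run(Function<R, T> op) throws InterruptedException, ExecutionException {
		Future<T> future = executor.submit(() -> op.apply(resource)); // compare submit(...)
		activeOps.incrementAndGet();
		try {
			return future.get(); // compare getFromFuture(...): the caller blocks here
		} finally {
			activeOps.decrementAndGet();
		}
	}

	/** Compare Transaction.hasActiveOperations(). */
	boolean hasActiveOperations() {
		return activeOps.get() > 0;
	}

	@Override
	public void close() {
		executor.shutdownNow();
	}
}
```

The counter is incremented after submission and decremented when the caller collects the result, as the `submit`/`getFromFuture` pair further down does, so `hasActiveOperations()` reflects work that is still queued or running.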
+ */ + private final ExecutorService executor = Executors + .newSingleThreadExecutor(new ThreadFactoryBuilder().setNameFormat("rdf4j-transaction-%d").build()); + + /** + * Counter of the active operations submitted to the executor + */ + private final AtomicInteger activeOperations = new AtomicInteger(); + + /** + * Create a new Transaction for the given {@link Repository}. + * + * @param repository the {@link Repository} on which to open a transaction. + * @throws InterruptedException if the transaction thread is interrupted while opening a connection. + * @throws ExecutionException if an error occurs while opening the connection. + */ + Transaction(Repository repository) throws InterruptedException, ExecutionException { + this.id = UUID.randomUUID(); + this.rep = repository; + this.txnConnection = getTransactionConnection(); + } + + /** + * The identifier of this transaction object. + * + * @return a {@link UUID} that identifies this Transaction. + */ + UUID getID() { + return id; + } + + /** + * Start the transaction. + * + * @param settings the {@link TransactionSetting}s to use for this transaction (including {@link IsolationLevel}). + * Optional vararg argument. + * @throws InterruptedException if the transaction thread is interrupted + * @throws ExecutionException if an error occurs while starting the transaction. + */ + void begin(TransactionSetting... settings) throws InterruptedException, ExecutionException { + Future result = submit(() -> { + txnConnection.begin(settings); + return true; + }); + getFromFuture(result); + } + + /** + * Rolls back all updates in the transaction. + * + * @throws ExecutionException + * @throws InterruptedException + */ + void rollback() throws InterruptedException, ExecutionException { + Future result = submit(() -> { + txnConnection.rollback(); + return true; + }); + getFromFuture(result); + } + + /** + * @throws ExecutionException + * @throws InterruptedException + */ + void prepare() throws InterruptedException, ExecutionException { + Future result = submit(() -> { + txnConnection.prepare(); + return true; + }); + getFromFuture(result); + } + + /** + * @throws ExecutionException + * @throws InterruptedException + */ + void commit() throws InterruptedException, ExecutionException { + Future result = submit(() -> { + txnConnection.commit(); + return true; + }); + getFromFuture(result); + } + + /** + * Prepares a query for evaluation on this transaction. + * + * @param queryLanguage The {@link QueryLanguage query language} in which the query is formulated. + * @param query The query string. + * @param baseURI The base URI to resolve any relative URIs that are in the query against, can be + * null if the query does not contain any relative URIs. + * @return A query ready to be evaluated on this repository. + * @throws InterruptedException if the transaction thread is interrupted + * @throws ExecutionException if an error occurs while executing the operation. + */ + Query prepareQuery(QueryLanguage queryLanguage, String query, String baseURI) + throws InterruptedException, ExecutionException { + Future result = submit(() -> txnConnection.prepareQuery(queryLanguage, query, baseURI)); + return getFromFuture(result); + } + + /** + * Evaluate a TupleQuery in this transaction and return the result. + * + * @param tQuery a {@link TupleQuery} prepared on this transaction. 
+ * @return a {@link TupleQueryResult} + * @throws InterruptedException if the transaction thread is interrupted + * @throws ExecutionException if an error occurs while executing the operation. + */ + TupleQueryResult evaluate(TupleQuery tQuery) throws InterruptedException, ExecutionException { + Future result = submit(tQuery::evaluate); + return getFromFuture(result); + } + + /** + * Evaluate a {@link GraphQuery} in this transaction and return the result. + * + * @param gQuery a {@link GraphQuery} prepared on this transaction. + * @return a {@link GraphQueryResult} + * @throws InterruptedException if the transaction thread is interrupted + * @throws ExecutionException if an error occurs while executing the operation. + */ + GraphQueryResult evaluate(GraphQuery gQuery) throws InterruptedException, ExecutionException { + Future result = submit(gQuery::evaluate); + return getFromFuture(result); + } + + /** + * Evaluate a {@link BooleanQuery} in this transaction and return the result. + * + * @param bQuery a {@link BooleanQuery} prepared on this transaction. + * @return the query result as a boolean + * @throws InterruptedException if the transaction thread is interrupted + * @throws ExecutionException if an error occurs while executing the operation. + */ + boolean evaluate(BooleanQuery bQuery) throws InterruptedException, ExecutionException { + Future result = submit(() -> bQuery.evaluate()); + return getFromFuture(result); + } + + /** + * @param subj + * @param pred + * @param obj + * @param useInferencing + * @param rdfWriter + * @param contexts + * @throws ExecutionException + * @throws InterruptedException + */ + void exportStatements(Resource subj, IRI pred, Value obj, boolean useInferencing, RDFWriter rdfWriter, + Resource... contexts) throws InterruptedException, ExecutionException { + Future result = submit(() -> { + txnConnection.exportStatements(subj, pred, obj, useInferencing, rdfWriter, contexts); + return true; + }); + getFromFuture(result); + } + + /** + * Returns the number of (explicit) statements that are in the specified contexts in this transaction. + * + * @param contexts The context(s) to get the data from. Note that this parameter is a vararg and as such is + * optional. If no contexts are supplied the method operates on the entire repository. + * @return The number of explicit statements from the specified contexts in this transaction. + */ + long getSize(Resource[] contexts) throws InterruptedException, ExecutionException { + Future result = submit(() -> txnConnection.size(contexts)); + return getFromFuture(result); + } + + /** + * Adds RDF data from an {@link InputStream} to the transaction. + * + * @param inputStream + * @param baseURI + * @param format + * @param contexts + * @throws ExecutionException + * @throws InterruptedException + */ + void add(InputStream inputStream, String baseURI, RDFFormat format, boolean preserveBNodes, Resource... contexts) + throws InterruptedException, ExecutionException { + Future result = submit(() -> { + logger.debug("executing add operation"); + try { + if (preserveBNodes) { + // create a reconfigured parser + inserter instead of + // relying on standard + // repositoryconn add method. 
+ RDFParser parser = Rio.createParser(format); + parser.getParserConfig().set(BasicParserSettings.PRESERVE_BNODE_IDS, true); + RDFInserter inserter = new RDFInserter(txnConnection); + inserter.setPreserveBNodeIDs(true); + if (contexts.length > 0) { + inserter.enforceContext(contexts); + } + parser.setRDFHandler(inserter); + parser.parse(inputStream, baseURI); + } else { + txnConnection.add(inputStream, baseURI, format, contexts); + } + return true; + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + getFromFuture(result); + } + + /** + * @param contentType + * @param inputStream + * @param baseURI + * @throws ExecutionException + * @throws InterruptedException + */ + void delete(RDFFormat contentType, InputStream inputStream, String baseURI) + throws InterruptedException, ExecutionException { + Future result = submit(() -> { + logger.debug("executing delete operation"); + RDFParser parser = Rio.createParser(contentType, txnConnection.getValueFactory()); + + parser.setRDFHandler(new WildcardRDFRemover(txnConnection)); + parser.getParserConfig().set(BasicParserSettings.PRESERVE_BNODE_IDS, true); + try { + parser.parse(inputStream, baseURI); + return true; + } catch (IOException e) { + logger.error("error during txn delete operation", e); + throw new RuntimeException(e); + } + }); + getFromFuture(result); + } + + /** + * @param queryLn + * @param sparqlUpdateString + * @param baseURI + * @param includeInferred + * @param dataset + * @param bindings + * @throws ExecutionException + * @throws InterruptedException + */ + void executeUpdate(QueryLanguage queryLn, String sparqlUpdateString, String baseURI, boolean includeInferred, + Dataset dataset, Map bindings) throws InterruptedException, ExecutionException { + Future result = submit(() -> { + Update update = txnConnection.prepareUpdate(queryLn, sparqlUpdateString, baseURI); + update.setIncludeInferred(includeInferred); + if (dataset != null) { + update.setDataset(dataset); + } + for (String bindingName : bindings.keySet()) { + update.setBinding(bindingName, bindings.get(bindingName)); + } + + update.execute(); + return true; + }); + getFromFuture(result); + } + + /** + * Checks if the user has any scheduled tasks for this transaction that have not yet completed. + * + * @return True if there are currently no active tasks being executed for this transaction and false otherwise. + */ + boolean hasActiveOperations() { + return activeOperations.get() > 0; + } + + /** + * Checks if close has been called for this transaction. + * + * @return True if the close method has been called for this transaction. + */ + boolean isClosed() { + return isClosed.get(); + } + + /** + * Checks if close has been completed for this transaction. + * + * @return True if the close operations have been completed. + */ + boolean isComplete() { + return closeCompleted.get(); + } + + /** + * Close this transaction. + * + * @throws InterruptedException + * @throws ExecutionException + */ + @Override + public void close() throws InterruptedException, ExecutionException { + if (isClosed.compareAndSet(false, true)) { + try { + txnConnection.close(); + } finally { + try { + if (!executor.isTerminated()) { + executor.shutdownNow(); + } + } finally { + closeCompleted.set(true); + } + } + } + } + + /** + * Obtains a {@link RepositoryConnection} through the {@link ExecutorService}. + * + * @return A new {@link RepositoryConnection} to use for this Transaction. + * @throws InterruptedException If the execution of the task was interrupted. 
+ * @throws ExecutionException If the execution of the task failed for any reason. + */ + private RepositoryConnection getTransactionConnection() throws InterruptedException, ExecutionException { + // create a new RepositoryConnection with correct parser settings + Future result = submit(() -> { + RepositoryConnection conn = rep.getConnection(); + ParserConfig config = conn.getParserConfig(); + config.addNonFatalError(BasicParserSettings.VERIFY_DATATYPE_VALUES); + config.addNonFatalError(BasicParserSettings.VERIFY_LANGUAGE_TAGS); + + return conn; + }); + return getFromFuture(result); + } + + /** + * Atomically submit the task to the executor and add to our local list used to track whether there are outstanding + * operations for the executor. + * + * @param callable The task to submit + * @return A {@link Future} that can be used to track whether the operation has succeeded and get the result. + */ + private Future submit(final Callable callable) { + final Future result = executor.submit(callable); + // increment the counter of the active operations + // note that it need to be decremented once the Future completes + activeOperations.incrementAndGet(); + return result; + } + + /** + * Atomically submit the task to the executor and add to our local list used to track whether there are outstanding + * operations for the executor. In addition, this atomically shuts down the ExecutorService to prevent future + * submissions from succeeding. + * + * @param callable The task to submit + * @return A {@link Future} that can be used to track whether the operation has succeeded and get the result. + */ + private Future submitAndShutdown(final Callable callable) { + final Future result = executor.submit(callable); + // increment the counter of the active operations + // note that it need to be decremented once the Future completes + activeOperations.incrementAndGet(); + executor.shutdown(); + return result; + } + + private T getFromFuture(Future result) throws InterruptedException, ExecutionException { + try { + return result.get(); + } finally { + activeOperations.decrementAndGet(); + } + } + + private static class WildcardRDFRemover extends AbstractRDFHandler { + + private static final Resource[] ALL_CONTEXT = {}; + private static final Resource[] DEFAULT_CONTEXT = { null }; + + private final RepositoryConnection conn; + + public WildcardRDFRemover(RepositoryConnection conn) { + super(); + this.conn = conn; + } + + @Override + public void handleStatement(Statement st) throws RDFHandlerException { + Resource subject = SESAME.WILDCARD.equals(st.getSubject()) ? null : st.getSubject(); + IRI predicate = SESAME.WILDCARD.equals(st.getPredicate()) ? null : st.getPredicate(); + Value object = SESAME.WILDCARD.equals(st.getObject()) ? 
null : st.getObject(); + + Resource[] context; + if (st.getContext() == null) { + context = ALL_CONTEXT; + } else if (RDF4J.NIL.equals(st.getContext())) { + context = DEFAULT_CONTEXT; + } else { + context = new Resource[] { st.getContext() }; + } + + try { + if (subject == null && predicate == null && object == null) { + // use the RepositoryConnection.clear operation if we're removing all statements + conn.clear(context); + } else { + conn.remove(subject, predicate, object, context); + } + } catch (RepositoryException e) { + throw new RDFHandlerException(e); + } + + } + + } +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionController.java new file mode 100644 index 00000000000..312165c8f6a --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionController.java @@ -0,0 +1,702 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import static jakarta.servlet.http.HttpServletResponse.SC_BAD_REQUEST; +import static jakarta.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR; +import static jakarta.servlet.http.HttpServletResponse.SC_NOT_ACCEPTABLE; +import static jakarta.servlet.http.HttpServletResponse.SC_SERVICE_UNAVAILABLE; +import static org.eclipse.rdf4j.http.protocol.Protocol.BINDING_PREFIX; +import static org.eclipse.rdf4j.http.protocol.Protocol.CONTEXT_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.DEFAULT_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.INCLUDE_INFERRED_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.INSERT_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.NAMED_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.OBJECT_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.PREDICATE_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_LANGUAGE_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.QUERY_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.REMOVE_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.SUBJECT_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.USING_GRAPH_PARAM_NAME; +import static org.eclipse.rdf4j.http.protocol.Protocol.USING_NAMED_GRAPH_PARAM_NAME; + +import java.io.IOException; +import java.nio.charset.Charset; +import java.nio.charset.StandardCharsets; +import java.util.Collections; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; +import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; + +import org.apache.commons.io.IOUtils; +import org.eclipse.rdf4j.common.lang.FileFormat; +import 
org.eclipse.rdf4j.common.lang.service.FileFormatServiceRegistry; +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.common.webapp.views.SimpleResponseView; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.protocol.Protocol.Action; +import org.eclipse.rdf4j.http.protocol.error.ErrorInfo; +import org.eclipse.rdf4j.http.protocol.error.ErrorType; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.HTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.BooleanQueryResultView; +import org.eclipse.rdf4j.http.server.repository.GraphQueryResultView; +import org.eclipse.rdf4j.http.server.repository.QueryResultView; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.http.server.repository.TupleQueryResultView; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.model.ValueFactory; +import org.eclipse.rdf4j.model.impl.SimpleValueFactory; +import org.eclipse.rdf4j.query.BooleanQuery; +import org.eclipse.rdf4j.query.GraphQuery; +import org.eclipse.rdf4j.query.MalformedQueryException; +import org.eclipse.rdf4j.query.Query; +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.QueryInterruptedException; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.TupleQuery; +import org.eclipse.rdf4j.query.UnsupportedQueryLanguageException; +import org.eclipse.rdf4j.query.UpdateExecutionException; +import org.eclipse.rdf4j.query.impl.SimpleDataset; +import org.eclipse.rdf4j.query.resultio.BooleanQueryResultWriterRegistry; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterRegistry; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.eclipse.rdf4j.rio.RDFWriterRegistry; +import org.eclipse.rdf4j.rio.Rio; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.DisposableBean; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.View; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for transaction creation on a repository. 
+ * + * @author Jeen Broekstra + */ +public class TransactionController extends AbstractController implements DisposableBean { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + + public TransactionController() throws ApplicationContextException { + setSupportedMethods(METHOD_POST, "PUT", "DELETE"); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + ModelAndView result; + + String reqMethod = request.getMethod(); + UUID transactionId = getTransactionID(request); + logger.debug("transaction id: {}", transactionId); + logger.debug("request content type: {}", request.getContentType()); + + Transaction transaction = ActiveTransactionRegistry.INSTANCE.getTransaction(transactionId); + + if (transaction == null) { + logger.warn("could not find transaction for transaction id {}", transactionId); + throw new ClientHTTPException(SC_BAD_REQUEST, + "unable to find registered transaction for transaction id '" + transactionId + "'"); + } + + // if no action is specified in the request, it's a rollback (since it's + // the only txn operation that does not require the action parameter). + final String actionParam = request.getParameter(Protocol.ACTION_PARAM_NAME); + final Action action = actionParam != null ? Action.valueOf(actionParam) : Action.ROLLBACK; + switch (action) { + case QUERY: + // TODO SES-2238 note that we allow POST requests for backward + // compatibility reasons with earlier + // 2.8.x releases, even though according to the protocol spec only + // PUT is allowed. + if ("PUT".equals(reqMethod) || METHOD_POST.equals(reqMethod)) { + logger.info("{} txn query request", reqMethod); + result = processQuery(transaction, request, response); + logger.info("{} txn query request finished", reqMethod); + } else { + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + "Method not allowed: " + reqMethod); + } + break; + case GET: + if ("PUT".equals(reqMethod) || METHOD_POST.equals(reqMethod)) { + logger.info("{} txn get/export statements request", reqMethod); + result = getExportStatementsResult(transaction, request, response); + logger.info("{} txn get/export statements request finished", reqMethod); + } else { + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + "Method not allowed: " + reqMethod); + } + break; + case SIZE: + if ("PUT".equals(reqMethod) || METHOD_POST.equals(reqMethod)) { + logger.info("{} txn size request", reqMethod); + result = getSize(transaction, request, response); + logger.info("{} txn size request finished", reqMethod); + } else { + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + "Method not allowed: " + reqMethod); + } + break; + case PING: + String text = Long.toString(ActiveTransactionRegistry.INSTANCE.getTimeout(TimeUnit.MILLISECONDS)); + Map model = Collections.singletonMap(SimpleResponseView.CONTENT_KEY, text); + result = new ModelAndView(SimpleResponseView.getInstance(), model); + break; + default: + // TODO Action.ROLLBACK check is for backward compatibility with + // older 2.8.x releases only. It's not in the protocol spec. 
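Seen from a client, the action dispatch above roughly corresponds to the ordinary transaction methods of the Repository API: the remote connection turns begin/add/size/commit into requests against the transaction URL this controller serves. A sketch with placeholder server URL, repository id, data and class name:

```java
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.http.HTTPRepository;

public class TransactionClientSketch {
	public static void main(String[] args) {
		// Placeholder server URL and repository id.
		HTTPRepository repo = new HTTPRepository("http://localhost:8080/rdf4j-server", "test");
		ValueFactory vf = SimpleValueFactory.getInstance();
		try (RepositoryConnection conn = repo.getConnection()) {
			conn.begin(); // opens a server-side transaction
			conn.add(vf.createIRI("urn:example:s"), RDF.TYPE, // Action.ADD on the transaction URL
					vf.createIRI("urn:example:Widget"));
			System.out.println("size inside txn: " + conn.size()); // Action.SIZE
			conn.commit(); // Action.COMMIT, after which the registry deregisters the transaction
		} finally {
			repo.shutDown();
		}
	}
}
```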
+ if ("DELETE".equals(reqMethod) + || (action.equals(Action.ROLLBACK) && ("PUT".equals(reqMethod) || METHOD_POST.equals(reqMethod)))) { + logger.info("transaction rollback"); + try { + transaction.rollback(); + } finally { + try { + transaction.close(); + } finally { + ActiveTransactionRegistry.INSTANCE.deregister(transaction); + } + } + result = new ModelAndView(EmptySuccessView.getInstance()); + logger.info("transaction rollback request finished."); + } else if ("PUT".equals(reqMethod) || METHOD_POST.equals(reqMethod)) { + // TODO filter for appropriate PUT operations + logger.info("{} txn operation", reqMethod); + result = processModificationOperation(transaction, action, request, response); + logger.info("PUT txn operation request finished."); + } else { + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + "Method not allowed: " + reqMethod); + } + break; + } + if (!(transaction.isClosed() || transaction.isComplete())) { + ActiveTransactionRegistry.INSTANCE.active(transaction); + } + return result; + } + + private UUID getTransactionID(HttpServletRequest request) throws ClientHTTPException { + String pathInfoStr = request.getPathInfo(); + + UUID txnID = null; + + if (pathInfoStr != null && !pathInfoStr.equals("/")) { + String[] pathInfo = pathInfoStr.substring(1).split("/"); + // should be of the form: //transactions/ + if (pathInfo.length == 3) { + try { + txnID = UUID.fromString(pathInfo[2]); + logger.debug("txnID is '{}'", txnID); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "not a valid transaction id: " + pathInfo[2]); + } + } else { + logger.warn("could not determine transaction id from path info {} ", pathInfoStr); + } + } + + return txnID; + } + + private ModelAndView processModificationOperation(Transaction transaction, Action action, + HttpServletRequest request, HttpServletResponse response) throws IOException, HTTPException { + ProtocolUtil.logRequestParameters(request); + + Map model = new HashMap<>(); + + String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); + if (baseURI == null) { + baseURI = ""; + } + + final Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, + SimpleValueFactory.getInstance()); + + final boolean preserveNodeIds = ProtocolUtil.parseBooleanParam(request, Protocol.PRESERVE_BNODE_ID_PARAM_NAME, + false); + + try { + RDFFormat format; + switch (action) { + case ADD: + format = Rio.getParserFormatForMIMEType(request.getContentType()) + .orElseThrow(Rio.unsupportedFormat(request.getContentType())); + transaction.add(request.getInputStream(), baseURI, format, preserveNodeIds, contexts); + break; + case DELETE: + format = Rio.getParserFormatForMIMEType(request.getContentType()) + .orElseThrow(Rio.unsupportedFormat(request.getContentType())); + transaction.delete(format, request.getInputStream(), baseURI); + break; + case UPDATE: + return getSparqlUpdateResult(transaction, request, response); + case PREPARE: + transaction.prepare(); + break; + case COMMIT: + transaction.commit(); + // If commit fails with an exception, deregister should be skipped so the user + // has a chance to do a proper rollback. See #725. 
+ ActiveTransactionRegistry.INSTANCE.deregister(transaction); + break; + default: + logger.warn("transaction modification action '{}' not recognized", action); + throw new ClientHTTPException("modification action not recognized: " + action); + } + + model.put(SimpleResponseView.SC_KEY, HttpServletResponse.SC_OK); + return new ModelAndView(SimpleResponseView.getInstance(), model); + } catch (Exception e) { + if (e instanceof ClientHTTPException) { + throw (ClientHTTPException) e; + } else { + throw new ServerHTTPException(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, + "Transaction handling error: " + e.getMessage(), e); + } + } + } + + private ModelAndView getSize(Transaction transaction, HttpServletRequest request, HttpServletResponse response) + throws HTTPException { + ProtocolUtil.logRequestParameters(request); + + Map model = new HashMap<>(); + final boolean headersOnly = METHOD_HEAD.equals(request.getMethod()); + + if (!headersOnly) { + Repository repository = RepositoryInterceptor.getRepository(request); + + ValueFactory vf = repository.getValueFactory(); + Resource[] contexts = ProtocolUtil.parseContextParam(request, Protocol.CONTEXT_PARAM_NAME, vf); + + long size; + + try { + size = transaction.getSize(contexts); + } catch (RepositoryException | InterruptedException | ExecutionException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + model.put(SimpleResponseView.CONTENT_KEY, String.valueOf(size)); + } + + return new ModelAndView(SimpleResponseView.getInstance(), model); + } + + /** + * Get all statements and export them as RDF. + * + * @return a model and view for exporting the statements. + */ + private ModelAndView getExportStatementsResult(Transaction transaction, HttpServletRequest request, + HttpServletResponse response) throws ClientHTTPException { + ProtocolUtil.logRequestParameters(request); + + ValueFactory vf = SimpleValueFactory.getInstance(); + + Resource subj = ProtocolUtil.parseResourceParam(request, SUBJECT_PARAM_NAME, vf); + IRI pred = ProtocolUtil.parseURIParam(request, PREDICATE_PARAM_NAME, vf); + Value obj = ProtocolUtil.parseValueParam(request, OBJECT_PARAM_NAME, vf); + Resource[] contexts = ProtocolUtil.parseContextParam(request, CONTEXT_PARAM_NAME, vf); + boolean useInferencing = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); + + RDFWriterFactory rdfWriterFactory = ProtocolUtil.getAcceptableService(request, response, + RDFWriterRegistry.getInstance()); + + Map model = new HashMap<>(); + model.put(TransactionExportStatementsView.SUBJECT_KEY, subj); + model.put(TransactionExportStatementsView.PREDICATE_KEY, pred); + model.put(TransactionExportStatementsView.OBJECT_KEY, obj); + model.put(TransactionExportStatementsView.CONTEXTS_KEY, contexts); + model.put(TransactionExportStatementsView.USE_INFERENCING_KEY, Boolean.valueOf(useInferencing)); + model.put(TransactionExportStatementsView.FACTORY_KEY, rdfWriterFactory); + model.put(TransactionExportStatementsView.HEADERS_ONLY, METHOD_HEAD.equals(request.getMethod())); + + model.put(TransactionExportStatementsView.TRANSACTION_KEY, transaction); + return new ModelAndView(TransactionExportStatementsView.getInstance(), model); + } + + /** + * Evaluates a query on the given connection and returns the resulting {@link QueryResultView}. The + * {@link QueryResultView} will take care of correctly releasing the connection back to the + * {@link ActiveTransactionRegistry}, after fully rendering the query result for sending over the wire. 
+ */ + private ModelAndView processQuery(Transaction txn, HttpServletRequest request, HttpServletResponse response) + throws IOException, HTTPException { + String queryStr; + final String contentType = request.getContentType(); + if (contentType != null && contentType.contains(Protocol.SPARQL_QUERY_MIME_TYPE)) { + Charset charset = getCharset(request); + queryStr = IOUtils.toString(request.getInputStream(), charset); + } else { + queryStr = request.getParameter(QUERY_PARAM_NAME); + } + + View view; + Object queryResult; + FileFormatServiceRegistry registry; + + try { + Query query = getQuery(txn, queryStr, request, response); + + if (query instanceof TupleQuery) { + TupleQuery tQuery = (TupleQuery) query; + + queryResult = txn.evaluate(tQuery); + registry = TupleQueryResultWriterRegistry.getInstance(); + view = TupleQueryResultView.getInstance(); + } else if (query instanceof GraphQuery) { + GraphQuery gQuery = (GraphQuery) query; + + queryResult = txn.evaluate(gQuery); + registry = RDFWriterRegistry.getInstance(); + view = GraphQueryResultView.getInstance(); + } else if (query instanceof BooleanQuery) { + BooleanQuery bQuery = (BooleanQuery) query; + + queryResult = txn.evaluate(bQuery); + registry = BooleanQueryResultWriterRegistry.getInstance(); + view = BooleanQueryResultView.getInstance(); + } else { + throw new ClientHTTPException(SC_BAD_REQUEST, "Unsupported query type: " + query.getClass().getName()); + } + } catch (QueryInterruptedException | InterruptedException | ExecutionException e) { + if (e.getCause() != null && e.getCause() instanceof MalformedQueryException) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getCause().getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } else { + logger.info("Query interrupted", e); + throw new ServerHTTPException(SC_SERVICE_UNAVAILABLE, "Query execution interrupted"); + } + } catch (QueryEvaluationException e) { + logger.info("Query evaluation error", e); + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException + // directly (see SES-1016). + throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Query evaluation error: " + e.getMessage()); + } + } + Object factory = ProtocolUtil.getAcceptableService(request, response, registry); + + Map model = new HashMap<>(); + model.put(QueryResultView.FILENAME_HINT_KEY, "query-result"); + model.put(QueryResultView.QUERY_RESULT_KEY, queryResult); + model.put(QueryResultView.FACTORY_KEY, factory); + model.put(QueryResultView.HEADERS_ONLY, false); // TODO needed for HEAD + // requests. + return new ModelAndView(view, model); + } + + private static Charset getCharset(HttpServletRequest request) { + return request.getCharacterEncoding() != null ? 
Charset.forName(request.getCharacterEncoding()) + : StandardCharsets.UTF_8; + } + + private Query getQuery(Transaction txn, String queryStr, HttpServletRequest request, HttpServletResponse response) + throws IOException, ClientHTTPException, InterruptedException, ExecutionException { + Query result = null; + + // default query language is SPARQL + QueryLanguage queryLn = QueryLanguage.SPARQL; + + String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); + logger.debug("query language param = {}", queryLnStr); + + if (queryLnStr != null) { + queryLn = QueryLanguage.valueOf(queryLnStr); + + if (queryLn == null) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); + } + } + + String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); + + // determine if inferred triples should be included in query evaluation + boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); + + String timeout = request.getParameter(Protocol.TIMEOUT_PARAM_NAME); + int maxQueryTime = 0; + if (timeout != null) { + try { + maxQueryTime = Integer.parseInt(timeout); + } catch (NumberFormatException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Invalid timeout value: " + timeout); + } + } + + // build a dataset, if specified + String[] defaultGraphURIs = request.getParameterValues(DEFAULT_GRAPH_PARAM_NAME); + String[] namedGraphURIs = request.getParameterValues(NAMED_GRAPH_PARAM_NAME); + + SimpleDataset dataset = null; + if (defaultGraphURIs != null || namedGraphURIs != null) { + dataset = new SimpleDataset(); + + if (defaultGraphURIs != null) { + for (String defaultGraphURI : defaultGraphURIs) { + try { + IRI uri = null; + if (!"null".equals(defaultGraphURI)) { + uri = SimpleValueFactory.getInstance().createIRI(defaultGraphURI); + } + dataset.addDefaultGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, + "Illegal URI for default graph: " + defaultGraphURI); + } + } + } + + if (namedGraphURIs != null) { + for (String namedGraphURI : namedGraphURIs) { + try { + IRI uri = null; + if (!"null".equals(namedGraphURI)) { + uri = SimpleValueFactory.getInstance().createIRI(namedGraphURI); + } + dataset.addNamedGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); + } + } + } + } + + try { + result = txn.prepareQuery(queryLn, queryStr, baseURI); + result.setIncludeInferred(includeInferred); + + if (maxQueryTime > 0) { + result.setMaxExecutionTime(maxQueryTime); + } + + if (dataset != null) { + result.setDataset(dataset); + } + + // determine if any variable bindings have been set on this query. 
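`getQuery()` above rebuilds a prepared query from the request: query language, base URI, inference flag, timeout, FROM / FROM NAMED dataset and variable bindings. Its client-side mirror image looks roughly like the sketch below; the graph IRI, the rdf:type binding, the 30-second timeout and the class name are illustrative assumptions.

```java
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.TupleQuery;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.query.impl.SimpleDataset;
import org.eclipse.rdf4j.repository.RepositoryConnection;

class QuerySketch {
	/** Evaluate a small SELECT with an explicit dataset, a binding and a timeout. */
	static void run(RepositoryConnection conn) {
		TupleQuery query = conn.prepareTupleQuery(QueryLanguage.SPARQL,
				"SELECT ?s WHERE { ?s ?p ?o }");
		SimpleDataset dataset = new SimpleDataset();
		dataset.addDefaultGraph(SimpleValueFactory.getInstance()
				.createIRI("http://example.org/graph/demo")); // placeholder graph
		query.setDataset(dataset); // default / named graph parameters
		query.setBinding("p", RDF.TYPE); // a BINDING_PREFIX-prefixed parameter
		query.setMaxExecutionTime(30); // the timeout parameter, in seconds
		try (TupleQueryResult result = query.evaluate()) {
			while (result.hasNext()) {
				System.out.println(result.next().getValue("s"));
			}
		}
	}
}
```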
+ @SuppressWarnings("unchecked") + Enumeration parameterNames = request.getParameterNames(); + + while (parameterNames.hasMoreElements()) { + String parameterName = parameterNames.nextElement(); + + if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { + String bindingName = parameterName.substring(BINDING_PREFIX.length()); + Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, + SimpleValueFactory.getInstance()); + result.setBinding(bindingName, bindingValue); + } + } + } catch (UnsupportedQueryLanguageException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.UNSUPPORTED_QUERY_LANGUAGE, queryLn.getName()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (MalformedQueryException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } catch (RepositoryException e) { + logger.error("Repository error", e); + response.sendError(SC_INTERNAL_SERVER_ERROR); + } + + return result; + } + + private ModelAndView getSparqlUpdateResult(Transaction transaction, HttpServletRequest request, + HttpServletResponse response) throws ServerHTTPException, ClientHTTPException, HTTPException { + String sparqlUpdateString; + final String contentType = request.getContentType(); + if (contentType != null && contentType.contains(Protocol.SPARQL_UPDATE_MIME_TYPE)) { + try { + Charset charset = getCharset(request); + sparqlUpdateString = IOUtils.toString(request.getInputStream(), charset); + } catch (IOException e) { + logger.warn("error reading sparql update string from request body", e); + throw new ClientHTTPException(SC_BAD_REQUEST, + "could not read SPARQL update string from body: " + e.getMessage()); + } + } else { + sparqlUpdateString = request.getParameter(Protocol.UPDATE_PARAM_NAME); + } + + if (null == sparqlUpdateString) { + throw new ClientHTTPException(SC_NOT_ACCEPTABLE, "Could not read SPARQL update string from body."); + } + + logger.debug("SPARQL update string: {}", sparqlUpdateString); + + // default query language is SPARQL + QueryLanguage queryLn = QueryLanguage.SPARQL; + + String queryLnStr = request.getParameter(QUERY_LANGUAGE_PARAM_NAME); + logger.debug("query language param = {}", queryLnStr); + + if (queryLnStr != null) { + queryLn = QueryLanguage.valueOf(queryLnStr); + + if (queryLn == null) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Unknown query language: " + queryLnStr); + } + } + + String baseURI = request.getParameter(Protocol.BASEURI_PARAM_NAME); + + // determine if inferred triples should be included in query evaluation + boolean includeInferred = ProtocolUtil.parseBooleanParam(request, INCLUDE_INFERRED_PARAM_NAME, true); + + // build a dataset, if specified + String[] defaultRemoveGraphURIs = request.getParameterValues(REMOVE_GRAPH_PARAM_NAME); + String[] defaultInsertGraphURIs = request.getParameterValues(INSERT_GRAPH_PARAM_NAME); + String[] defaultGraphURIs = request.getParameterValues(USING_GRAPH_PARAM_NAME); + String[] namedGraphURIs = request.getParameterValues(USING_NAMED_GRAPH_PARAM_NAME); + + SimpleDataset dataset = new SimpleDataset(); + + if (defaultRemoveGraphURIs != null) { + for (String graphURI : defaultRemoveGraphURIs) { + try { + IRI uri = null; + if (!"null".equals(graphURI)) { + uri = SimpleValueFactory.getInstance().createIRI(graphURI); + } + dataset.addDefaultRemoveGraph(uri); + } catch (IllegalArgumentException e) { + throw new 
ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default remove graph: " + graphURI); + } + } + } + + if (defaultInsertGraphURIs != null && defaultInsertGraphURIs.length > 0) { + String graphURI = defaultInsertGraphURIs[0]; + try { + IRI uri = null; + if (!"null".equals(graphURI)) { + uri = SimpleValueFactory.getInstance().createIRI(graphURI); + } + dataset.setDefaultInsertGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default insert graph: " + graphURI); + } + } + + if (defaultGraphURIs != null) { + for (String defaultGraphURI : defaultGraphURIs) { + try { + IRI uri = null; + if (!"null".equals(defaultGraphURI)) { + uri = SimpleValueFactory.getInstance().createIRI(defaultGraphURI); + } + dataset.addDefaultGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for default graph: " + defaultGraphURI); + } + } + } + + if (namedGraphURIs != null) { + for (String namedGraphURI : namedGraphURIs) { + try { + IRI uri = null; + if (!"null".equals(namedGraphURI)) { + uri = SimpleValueFactory.getInstance().createIRI(namedGraphURI); + } + dataset.addNamedGraph(uri); + } catch (IllegalArgumentException e) { + throw new ClientHTTPException(SC_BAD_REQUEST, "Illegal URI for named graph: " + namedGraphURI); + } + } + } + + if (logger.isDebugEnabled()) { + StringBuilder datasetStr = new StringBuilder(); + dataset.getDefaultGraphs() + .forEach(g -> datasetStr.append("DEFAULT GRAPH: FROM <").append(g.stringValue()).append(">\n")); + dataset.getNamedGraphs() + .forEach(g -> datasetStr.append("NAMED GRAPH: FROM NAMED <").append(g.stringValue()).append(">\n")); + dataset.getDefaultRemoveGraphs() + .forEach(g -> datasetStr.append("DEFAULT REMOVE GRAPH: DELETE FROM <") + .append(g.stringValue()) + .append(">\n")); + Optional.ofNullable(dataset.getDefaultInsertGraph()) + .ifPresent(g -> datasetStr.append("DEFAULT INSERT GRAPH: INSERT INTO <") + .append(g.stringValue()) + .append(">\n")); + + logger.debug("Dataset: {}", datasetStr); + } + + try { + // determine if any variable bindings have been set on this update. + @SuppressWarnings("unchecked") + Enumeration parameterNames = request.getParameterNames(); + + Map bindings = new HashMap<>(); + while (parameterNames.hasMoreElements()) { + String parameterName = parameterNames.nextElement(); + + if (parameterName.startsWith(BINDING_PREFIX) && parameterName.length() > BINDING_PREFIX.length()) { + String bindingName = parameterName.substring(BINDING_PREFIX.length()); + Value bindingValue = ProtocolUtil.parseValueParam(request, parameterName, + SimpleValueFactory.getInstance()); + bindings.put(bindingName, bindingValue); + } + } + + transaction.executeUpdate(queryLn, sparqlUpdateString, baseURI, includeInferred, dataset, bindings); + + return new ModelAndView(EmptySuccessView.getInstance()); + } catch (UpdateExecutionException | InterruptedException | ExecutionException | RepositoryException e) { + if (e.getCause() != null && e.getCause() instanceof HTTPException) { + // custom signal from the backend, throw as HTTPException directly + // (see SES-1016). + throw (HTTPException) e.getCause(); + } else { + throw new ServerHTTPException("Repository update error: " + e.getMessage(), e); + } + } + // custom signal from the backend, throw as HTTPException directly + // (see SES-1016). 
+ catch (MalformedQueryException e) { + ErrorInfo errInfo = new ErrorInfo(ErrorType.MALFORMED_QUERY, e.getMessage()); + throw new ClientHTTPException(SC_BAD_REQUEST, errInfo.toString()); + } + } + + // Comes from the DisposableBean interface, so that the ActiveTransactionRegistry scheduler can be stopped + @Override + public void destroy() + throws Exception { + ActiveTransactionRegistry.INSTANCE.destroyScheduler(); + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionExportStatementsView.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionExportStatementsView.java new file mode 100644 index 00000000000..b5b0bfc0a0a --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionExportStatementsView.java @@ -0,0 +1,118 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import static jakarta.servlet.http.HttpServletResponse.SC_OK; + +import java.io.OutputStream; +import java.nio.charset.Charset; +import java.util.Map; +import java.util.concurrent.ExecutionException; + +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.model.IRI; +import org.eclipse.rdf4j.model.Resource; +import org.eclipse.rdf4j.model.Value; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.RDFHandlerException; +import org.eclipse.rdf4j.rio.RDFWriter; +import org.eclipse.rdf4j.rio.RDFWriterFactory; +import org.springframework.web.servlet.View; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * View used to export statements as part of a transaction. Renders the statements as RDF using a serialization + * specified using a parameter or Accept header.
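As a rough illustration of the negotiation described above (not the servlet code itself), the sketch below maps an Accept-style MIME type to an `RDFWriterFactory` with RDF4J's Rio registry; the hard-coded Accept string and the Turtle fallback are assumptions made for the example.

```java
// Illustrative sketch only: resolve an RDFWriterFactory from an Accept-style MIME type.
// The accept value and the TURTLE fallback are assumptions, not the server's defaults.
import java.io.ByteArrayOutputStream;

import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFWriterFactory;
import org.eclipse.rdf4j.rio.RDFWriterRegistry;
import org.eclipse.rdf4j.rio.Rio;

public class AcceptNegotiationSketch {
	public static void main(String[] args) {
		String accept = "text/turtle"; // e.g. the first acceptable MIME type from the request
		RDFFormat format = Rio.getWriterFormatForMIMEType(accept)
				.orElse(RDFFormat.TURTLE); // assumed fallback for the sketch
		RDFWriterFactory factory = RDFWriterRegistry.getInstance()
				.get(format)
				.orElseThrow(IllegalStateException::new);
		// a factory like this is what the view reads from its model under FACTORY_KEY
		System.out.println(factory.getWriter(new ByteArrayOutputStream()).getRDFFormat());
	}
}
```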
+ * + * @author Herko ter Horst + * @author Jeen Broekstra + */ +public class TransactionExportStatementsView implements View { + + public static final String SUBJECT_KEY = "subject"; + + public static final String PREDICATE_KEY = "predicate"; + + public static final String OBJECT_KEY = "object"; + + public static final String CONTEXTS_KEY = "contexts"; + + public static final String USE_INFERENCING_KEY = "useInferencing"; + + public static final String TRANSACTION_KEY = "transaction"; + + public static final String FACTORY_KEY = "factory"; + + public static final String HEADERS_ONLY = "headersOnly"; + + private static final TransactionExportStatementsView INSTANCE = new TransactionExportStatementsView(); + + public static TransactionExportStatementsView getInstance() { + return INSTANCE; + } + + private TransactionExportStatementsView() { + } + + @Override + public String getContentType() { + return null; + } + + @SuppressWarnings("rawtypes") + @Override + public void render(Map model, HttpServletRequest request, HttpServletResponse response) throws Exception { + Resource subj = (Resource) model.get(SUBJECT_KEY); + IRI pred = (IRI) model.get(PREDICATE_KEY); + Value obj = (Value) model.get(OBJECT_KEY); + Resource[] contexts = (Resource[]) model.get(CONTEXTS_KEY); + boolean useInferencing = (Boolean) model.get(USE_INFERENCING_KEY); + Transaction transaction = (Transaction) model.get(TRANSACTION_KEY); + + boolean headersOnly = (Boolean) model.get(HEADERS_ONLY); + + RDFWriterFactory rdfWriterFactory = (RDFWriterFactory) model.get(FACTORY_KEY); + + RDFFormat rdfFormat = rdfWriterFactory.getRDFFormat(); + + try { + try (OutputStream out = response.getOutputStream()) { + RDFWriter rdfWriter = rdfWriterFactory.getWriter(out); + + response.setStatus(SC_OK); + + String mimeType = rdfFormat.getDefaultMIMEType(); + if (rdfFormat.hasCharset()) { + Charset charset = rdfFormat.getCharset(); + mimeType += "; charset=" + charset.name(); + } + response.setContentType(mimeType); + + String filename = "statements"; + if (rdfFormat.getDefaultFileExtension() != null) { + filename += "." + rdfFormat.getDefaultFileExtension(); + } + response.setHeader("Content-Disposition", "attachment; filename=" + filename); + + if (!headersOnly) { + transaction.exportStatements(subj, pred, obj, useInferencing, rdfWriter, contexts); + } + } + } catch (RDFHandlerException e) { + throw new ServerHTTPException("Serialization error: " + e.getMessage(), e); + } catch (ExecutionException | InterruptedException e) { + throw new ServerHTTPException("Repository error: " + e.getMessage(), e); + } + } + +} diff --git a/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionStartController.java b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionStartController.java new file mode 100644 index 00000000000..72c95e83c7f --- /dev/null +++ b/tools/server-spring6/src/main/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionStartController.java @@ -0,0 +1,189 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import static jakarta.servlet.http.HttpServletResponse.SC_CREATED; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; +import java.util.concurrent.ExecutionException; + +import org.eclipse.rdf4j.common.transaction.IsolationLevel; +import org.eclipse.rdf4j.common.transaction.IsolationLevels; +import org.eclipse.rdf4j.common.transaction.TransactionSetting; +import org.eclipse.rdf4j.common.transaction.TransactionSettingRegistry; +import org.eclipse.rdf4j.common.webapp.views.SimpleResponseView; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.http.server.ProtocolUtil; +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.model.vocabulary.SESAME; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.ApplicationContextException; +import org.springframework.web.servlet.ModelAndView; +import org.springframework.web.servlet.mvc.AbstractController; + +import jakarta.servlet.http.HttpServletRequest; +import jakarta.servlet.http.HttpServletResponse; + +/** + * Handles requests for transaction creation on a repository. + * + * @author Jeen Broekstra + */ +public class TransactionStartController extends AbstractController { + + private final Logger logger = LoggerFactory.getLogger(this.getClass()); + private String externalUrl; + + public TransactionStartController() throws ApplicationContextException { + setSupportedMethods(METHOD_POST); + } + + @Override + protected ModelAndView handleRequestInternal(HttpServletRequest request, HttpServletResponse response) + throws Exception { + ModelAndView result; + + Repository repository = RepositoryInterceptor.getRepository(request); + + String reqMethod = request.getMethod(); + + if (METHOD_POST.equals(reqMethod)) { + logger.info("POST transaction start"); + result = startTransaction(repository, request, response); + logger.info("transaction started"); + } else { + throw new ClientHTTPException(HttpServletResponse.SC_METHOD_NOT_ALLOWED, + "Method not allowed: " + reqMethod); + } + return result; + } + + @Deprecated + ArrayList getIsolationLevel(HttpServletRequest request) { + // process legacy isolation level param for backward compatibility with older clients + + ArrayList transactionSettings = new ArrayList<>(); + final String isolationLevelString = request.getParameter(Protocol.ISOLATION_LEVEL_PARAM_NAME); + if (isolationLevelString != null) { + final String isolationLevelConverted = isolationLevelString.replace(SESAME.NAMESPACE, ""); + for (IsolationLevel standardLevel : IsolationLevels.values()) { + if (standardLevel.getValue().equals(isolationLevelConverted)) { + transactionSettings.add(IsolationLevels.valueOf(isolationLevelConverted)); + break; + } + } + if (transactionSettings.isEmpty()) + throw new IllegalArgumentException("Unknown isolation-level setting " + isolationLevelString); + } + + return transactionSettings; + } + + ArrayList getTransactionSettings(HttpServletRequest request) { + ArrayList transactionSettings = new ArrayList<>(); + 
request.getParameterMap().forEach((k, v) -> { + if (k.startsWith(Protocol.TRANSACTION_SETTINGS_PREFIX)) { + String settingsName = k.replace(Protocol.TRANSACTION_SETTINGS_PREFIX, ""); + + // FIXME we should make the isolation level an SPI impl as well so that it will work with + // non-standard isolation levels + if (settingsName.equals(IsolationLevels.NONE.getName())) { + transactionSettings.add(IsolationLevels.valueOf(v[0])); + } else { + TransactionSettingRegistry.getInstance() + .get(settingsName) + .flatMap(factory -> factory.getTransactionSetting(v[0])) + .ifPresent(transactionSettings::add); + } + } + }); + + return transactionSettings; + } + + Transaction createTransaction(Repository repository) throws ExecutionException, InterruptedException { + return new Transaction(repository); + } + + private ModelAndView startTransaction(Repository repository, HttpServletRequest request, + HttpServletResponse response) throws IOException, ClientHTTPException, ServerHTTPException { + ProtocolUtil.logRequestParameters(request); + Map model = new HashMap<>(); + + ArrayList transactionSettings = getIsolationLevel(request); + transactionSettings.addAll(getTransactionSettings(request)); + + Transaction txn = null; + boolean allGood = false; + try { + txn = createTransaction(repository); + + if (transactionSettings.isEmpty()) { + txn.begin(); + } else { + txn.begin(transactionSettings.toArray(new TransactionSetting[0])); + } + + UUID txnId = txn.getID(); + + model.put(SimpleResponseView.SC_KEY, SC_CREATED); + final StringBuffer txnURL = getUrlBasePath(request); + txnURL.append("/" + txnId.toString()); + Map customHeaders = new HashMap<>(); + customHeaders.put("Location", txnURL.toString()); + model.put(SimpleResponseView.CUSTOM_HEADERS_KEY, customHeaders); + + ModelAndView result = new ModelAndView(SimpleResponseView.getInstance(), model); + ActiveTransactionRegistry.INSTANCE.register(txn); + allGood = true; + return result; + } catch (RepositoryException | InterruptedException | ExecutionException e) { + throw new ServerHTTPException("Transaction start error: " + e.getMessage(), e); + } finally { + if (!allGood) { + try { + txn.close(); + } catch (InterruptedException | ExecutionException e) { + throw new ServerHTTPException("Transaction start error: " + e.getMessage(), e); + } + } + } + } + + private StringBuffer getUrlBasePath(final HttpServletRequest request) { + if (externalUrl == null) { + return request.getRequestURL(); + } + + final StringBuffer url = new StringBuffer(); + if (externalUrl.endsWith("/")) { + url.append(externalUrl, 0, externalUrl.length() - 1); + } else { + url.append(externalUrl); + } + + url.append(request.getRequestURI()); + return url; + } + + public void setExternalUrl(final String externalUrl) { + this.externalUrl = externalUrl; + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationTest.java new file mode 100644 index 00000000000..aae71334dd6 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/common/webapp/navigation/NavigationTest.java @@ -0,0 +1,63 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. 
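The prefix handling in `getTransactionSettings()` above determines how per-transaction settings travel as request parameters. Purely as a sketch (the parameter layout is an assumption built from that prefix convention, and the SNAPSHOT value is an arbitrary choice), the snippet below assembles a name/value pair and resolves it back to an isolation level the same way the controller does.

```java
// Sketch of the transaction-setting parameter convention used by the controller above.
// The SNAPSHOT value is an arbitrary choice for the example.
import org.eclipse.rdf4j.common.transaction.IsolationLevels;
import org.eclipse.rdf4j.http.protocol.Protocol;

public class TransactionSettingParamSketch {
	public static void main(String[] args) {
		// client side: parameter name = settings prefix + the setting's name
		String paramName = Protocol.TRANSACTION_SETTINGS_PREFIX + IsolationLevels.NONE.getName();
		String paramValue = IsolationLevels.SNAPSHOT.name();

		// server side, mirroring getTransactionSettings(): strip the prefix, then map the value
		String settingsName = paramName.replace(Protocol.TRANSACTION_SETTINGS_PREFIX, "");
		if (settingsName.equals(IsolationLevels.NONE.getName())) {
			System.out.println("parsed isolation level: " + IsolationLevels.valueOf(paramValue));
		}
	}
}
```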
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.navigation; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.ArrayList; +import java.util.List; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +public class NavigationTest { + + private NavigationModel model = null; + + @BeforeEach + public void setUp() { + model = new NavigationModel(); + List navigationModelLocations = new ArrayList<>(); + navigationModelLocations.add("/navigation.xml"); + model.setNavigationModels(navigationModelLocations); + } + + @Test + public void testParse() { + assertNotNull(model, "Parsed model is null"); + assertEquals(1, model.getGroups().size(), "Model should have one group"); + Group systemGroup = model.getGroups().get(0); + assertEquals(1, systemGroup.getGroups().size(), "system group should have 1 subgroup"); + assertEquals(2, systemGroup.getViews().size(), "system group should have 2 views"); + View loggingView = systemGroup.getViews().get(1); + assertFalse(loggingView.isHidden(), "logging view should not be hidden"); + assertTrue(loggingView.isEnabled(), "logging view should be enabled"); + assertEquals("/system/logging.view", loggingView.getPath(), "Path for logging is not correct"); + assertEquals("/images/icons/system_logging.png", loggingView.getIcon(), "Icon for logging is not correct"); + assertEquals("system.logging.title", loggingView.getI18n(), "I18N for logging is not correct"); + Group loggingGroup = systemGroup.getGroups().get(0); + assertEquals(1, loggingGroup.getViews().size(), "logging subgroup should have 1 views"); + assertTrue(loggingGroup.isHidden(), "logging subgroup should be hidden"); + assertTrue(loggingGroup.isEnabled(), "logging subgroup should be enabled"); + View loggingOverview = loggingGroup.getViews().get(0); + assertFalse(loggingOverview.isEnabled(), "logging overview should be disabled"); + } + + @Test + public void testFind() { + assertNotNull(model.findView("/system/logging/overview.view"), "Find should have succeeded"); + assertNull(model.findView("/system/logging/bogus.view"), "Find should not have succeeded"); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/common/webapp/util/HttpServerUtilTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/common/webapp/util/HttpServerUtilTest.java new file mode 100644 index 00000000000..a1db33f605b --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/common/webapp/util/HttpServerUtilTest.java @@ -0,0 +1,633 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. + * + * All rights reserved. 
This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.common.webapp.util; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import java.io.BufferedReader; +import java.security.Principal; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Enumeration; +import java.util.Locale; +import java.util.Map; + +import org.eclipse.rdf4j.common.lang.FileFormat; +import org.eclipse.rdf4j.common.lang.service.FileFormatServiceRegistry; +import org.eclipse.rdf4j.query.resultio.TupleQueryResultWriterRegistry; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import jakarta.servlet.*; +import jakarta.servlet.http.*; + +/** + * @author jeen + */ +public class HttpServerUtilTest { + + private ArrayList tupleQueryMimeTypes; + + /** + */ + @BeforeEach + public void setUp() { + FileFormatServiceRegistry registry = TupleQueryResultWriterRegistry.getInstance(); + + tupleQueryMimeTypes = new ArrayList<>(16); + for (FileFormat format : registry.getKeys()) { + tupleQueryMimeTypes.addAll(format.getMIMETypes()); + } + } + + /** + * Test method for + * {@link org.eclipse.rdf4j.common.webapp.util.HttpServerUtil#selectPreferredMIMEType(java.util.Iterator, jakarta.servlet.http.HttpServletRequest)} + * . + */ + @Test + public void testSelectPreferredMIMEType1() { + + ServletRequestStub testRequest = new ServletRequestStub("application/sparql-results+json, */*"); + + String preferredType = HttpServerUtil.selectPreferredMIMEType(tupleQueryMimeTypes.iterator(), testRequest); + + assertEquals("application/sparql-results+json", preferredType); + + } + + /** + * Test method for + * {@link org.eclipse.rdf4j.common.webapp.util.HttpServerUtil#selectPreferredMIMEType(java.util.Iterator, jakarta.servlet.http.HttpServletRequest)} + * . + */ + @Test + public void testSelectPreferredMIMEType2() { + + ServletRequestStub testRequest = new ServletRequestStub("application/sparql-results+json, */*;q=0.9"); + + String preferredType = HttpServerUtil.selectPreferredMIMEType(tupleQueryMimeTypes.iterator(), testRequest); + + assertEquals("application/sparql-results+json", preferredType); + } + + /** + * Test method for + * {@link org.eclipse.rdf4j.common.webapp.util.HttpServerUtil#selectPreferredMIMEType(java.util.Iterator, jakarta.servlet.http.HttpServletRequest)} + * . + */ + @Test + public void testSelectPreferredMIMEType3() { + + ServletRequestStub testRequest = new ServletRequestStub("application/xml"); + + String preferredType = HttpServerUtil.selectPreferredMIMEType(tupleQueryMimeTypes.iterator(), testRequest); + + assertEquals("application/xml", preferredType); + } + + /** + * Test method for + * {@link org.eclipse.rdf4j.common.webapp.util.HttpServerUtil#selectPreferredMIMEType(java.util.Iterator, jakarta.servlet.http.HttpServletRequest)} + * . 
+ */ + @Test + public void testSelectPreferredMIMEType4() { + + ServletRequestStub testRequest = new ServletRequestStub("*/*", "application/sparql-result+xml;q=0.9", + "application/sparql-results+json"); + + String preferredType = HttpServerUtil.selectPreferredMIMEType(tupleQueryMimeTypes.iterator(), testRequest); + + assertEquals("application/sparql-results+json", preferredType); + } + + /** + * Test method for + * {@link org.eclipse.rdf4j.common.webapp.util.HttpServerUtil#selectPreferredMIMEType(java.util.Iterator, jakarta.servlet.http.HttpServletRequest)} + * . + */ + @Test + public void testSelectPreferredMIMEType5() { + + ServletRequestStub testRequest = new ServletRequestStub("application/*", "application/sparql-results+json"); + + String preferredType = HttpServerUtil.selectPreferredMIMEType(tupleQueryMimeTypes.iterator(), testRequest); + + assertEquals("application/sparql-results+json", preferredType); + } + + class ServletRequestStub implements jakarta.servlet.http.HttpServletRequest { + + private final Enumeration testHeaders; + + public ServletRequestStub(String... testHeaders) { + this.testHeaders = Collections.enumeration(Arrays.asList(testHeaders)); + } + + @Override + public Object getAttribute(String name) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Enumeration getAttributeNames() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getCharacterEncoding() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void setCharacterEncoding(String env) { + // TODO Auto-generated method stub + + } + + @Override + public int getContentLength() { + // TODO Auto-generated method stub + return 0; + } + + @Override + public String getContentType() { + // TODO Auto-generated method stub + return null; + } + + @Override + public ServletInputStream getInputStream() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getParameter(String name) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Enumeration getParameterNames() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String[] getParameterValues(String name) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Map getParameterMap() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getProtocol() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getScheme() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getServerName() { + // TODO Auto-generated method stub + return null; + } + + @Override + public int getServerPort() { + // TODO Auto-generated method stub + return 0; + } + + @Override + public BufferedReader getReader() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getRemoteAddr() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getRemoteHost() { + // TODO Auto-generated method stub + return null; + } + + @Override + public void setAttribute(String name, Object o) { + // TODO Auto-generated method stub + + } + + @Override + public void removeAttribute(String name) { + // TODO Auto-generated method stub + + } + + @Override + public Locale getLocale() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Enumeration getLocales() { + // TODO Auto-generated method stub + return null; + } + + 
@Override + public boolean isSecure() { + // TODO Auto-generated method stub + return false; + } + + @Override + public RequestDispatcher getRequestDispatcher(String path) { + // TODO Auto-generated method stub + return null; + } + + @Override + public int getRemotePort() { + // TODO Auto-generated method stub + return 0; + } + + @Override + public String getLocalName() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getLocalAddr() { + // TODO Auto-generated method stub + return null; + } + + @Override + public int getLocalPort() { + // TODO Auto-generated method stub + return 0; + } + + @Override + public String getAuthType() { + // TODO Auto-generated method stub + return null; + } + + @Override + public Cookie[] getCookies() { + // TODO Auto-generated method stub + return null; + } + + @Override + public long getDateHeader(String name) { + // TODO Auto-generated method stub + return 0; + } + + @Override + public String getHeader(String name) { + // TODO Auto-generated method stub + return null; + } + + @Override + public Enumeration getHeaders(String name) { + return testHeaders; + } + + @Override + public Enumeration getHeaderNames() { + // TODO Auto-generated method stub + return null; + } + + @Override + public int getIntHeader(String name) { + // TODO Auto-generated method stub + return 0; + } + + @Override + public String getMethod() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getPathInfo() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getPathTranslated() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getContextPath() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getQueryString() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getRemoteUser() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isUserInRole(String role) { + // TODO Auto-generated method stub + return false; + } + + @Override + public Principal getUserPrincipal() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getRequestedSessionId() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getRequestURI() { + // TODO Auto-generated method stub + return null; + } + + @Override + public StringBuffer getRequestURL() { + // TODO Auto-generated method stub + return null; + } + + @Override + public String getServletPath() { + // TODO Auto-generated method stub + return null; + } + + @Override + public HttpSession getSession(boolean create) { + // TODO Auto-generated method stub + return null; + } + + @Override + public HttpSession getSession() { + // TODO Auto-generated method stub + return null; + } + + @Override + public boolean isRequestedSessionIdValid() { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isRequestedSessionIdFromCookie() { + // TODO Auto-generated method stub + return false; + } + + @Override + public boolean isRequestedSessionIdFromURL() { + // TODO Auto-generated method stub + return false; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#getServletContext() + */ + @Override + public ServletContext getServletContext() { + // TODO Auto-generated method stub + return null; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#startAsync() + */ + @Override + public 
AsyncContext startAsync() throws IllegalStateException { + // TODO Auto-generated method stub + return null; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#startAsync(jakarta.servlet.ServletRequest, + * jakarta.servlet.ServletResponse) + */ + @Override + public AsyncContext startAsync(ServletRequest servletRequest, ServletResponse servletResponse) + throws IllegalStateException { + // TODO Auto-generated method stub + return null; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#isAsyncStarted() + */ + @Override + public boolean isAsyncStarted() { + // TODO Auto-generated method stub + return false; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#isAsyncSupported() + */ + @Override + public boolean isAsyncSupported() { + // TODO Auto-generated method stub + return false; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#getAsyncContext() + */ + @Override + public AsyncContext getAsyncContext() { + // TODO Auto-generated method stub + return null; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.ServletRequest#getDispatcherType() + */ + @Override + public DispatcherType getDispatcherType() { + // TODO Auto-generated method stub + return null; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.http.HttpServletRequest#authenticate(jakarta.servlet.http.HttpServletResponse) + */ + @Override + public boolean authenticate(HttpServletResponse response) throws ServletException { + // TODO Auto-generated method stub + return false; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.http.HttpServletRequest#login(java.lang.String, java.lang.String) + */ + @Override + public void login(String username, String password) throws ServletException { + // TODO Auto-generated method stub + + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.http.HttpServletRequest#logout() + */ + @Override + public void logout() throws ServletException { + // TODO Auto-generated method stub + + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.http.HttpServletRequest#getParts() + */ + @Override + public Collection getParts() throws ServletException { + // TODO Auto-generated method stub + return null; + } + + /* + * (non-Javadoc) + * + * @see jakarta.servlet.http.HttpServletRequest#getPart(java.lang.String) + */ + @Override + public Part getPart(String name) throws ServletException { + // TODO Auto-generated method stub + return null; + } + + @Override + public T upgrade(Class handlerClass) { + return null; + } + + @Override + public String changeSessionId() { + return null; + } + + @Override + public long getContentLengthLong() { + return 0; + } + + @Override + public String getRequestId() { + return ""; + } + + @Override + public String getProtocolRequestId() { + return ""; + } + + @Override + public ServletConnection getServletConnection() { + return null; + } + + @Override + public HttpServletMapping getHttpServletMapping() { + return HttpServletRequest.super.getHttpServletMapping(); + } + + @Override + public PushBuilder newPushBuilder() { + return HttpServletRequest.super.newPushBuilder(); + } + + @Override + public Map getTrailerFields() { + return HttpServletRequest.super.getTrailerFields(); + } + + @Override + public boolean isTrailerFieldsReady() { + return HttpServletRequest.super.isTrailerFieldsReady(); + } + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/RepositoryControllerTest.java 
b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/RepositoryControllerTest.java new file mode 100644 index 00000000000..38d89d72ec3 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/RepositoryControllerTest.java @@ -0,0 +1,122 @@ +/******************************************************************************* + * Copyright (c) 2019 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.junit.jupiter.api.Assertions.fail; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.nio.charset.StandardCharsets; + +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.config.RepositoryConfigException; +import org.eclipse.rdf4j.repository.config.RepositoryConfigSchema; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +public class RepositoryControllerTest { + + final String repositoryId = "test-repo"; + final RepositoryController controller = new RepositoryController(); + + private MockHttpServletRequest request; + private MockHttpServletResponse response; + private RepositoryManager manager; + + @BeforeEach + public void setUp() { + request = new MockHttpServletRequest(); + request.setAttribute("repositoryID", repositoryId); + response = new MockHttpServletResponse(); + + manager = mock(RepositoryManager.class); + controller.setRepositoryManager(manager); + } + + @Test + public void putOnNewRepoSucceeds() throws Exception { + request.setMethod(HttpMethod.PUT.name()); + request.setContentType(RDFFormat.NTRIPLES.getDefaultMIMEType()); + request.setContent( + ("_:node1 <" + RepositoryConfigSchema.REPOSITORYID + "> \"" + repositoryId + "\" .") + .getBytes(StandardCharsets.UTF_8)); + + when(manager.hasRepositoryConfig(repositoryId)).thenReturn(false); + + ArgumentCaptor config = ArgumentCaptor.forClass(RepositoryConfig.class); + + controller.handleRequest(request, response); + + verify(manager).addRepositoryConfig(config.capture()); + assertThat(config.getValue().getID()).isEqualTo(repositoryId); + } + + @Test + public void putOnExistingRepoFails() throws Exception { + request.setMethod(HttpMethod.PUT.name()); + request.setContentType(RDFFormat.NTRIPLES.getDefaultMIMEType()); + request.setContent( + ("_:node1 <" + RepositoryConfigSchema.REPOSITORYID + "> \"" + repositoryId + "\" .") + .getBytes(StandardCharsets.UTF_8)); + when(manager.hasRepositoryConfig(repositoryId)).thenReturn(true); + + try { + controller.handleRequest(request, response); + 
fail("expected exception"); + } catch (ClientHTTPException e) { + assertThat(e.getStatusCode()).isEqualTo(409); + } + } + + @Test + public void put_errorHandling_MissingConfig() throws Exception { + request.setMethod(HttpMethod.PUT.name()); + request.setContentType(RDFFormat.NTRIPLES.getDefaultMIMEType()); + request.setContent(("").getBytes(StandardCharsets.UTF_8)); + + try { + controller.handleRequest(request, response); + fail("expected exception"); + } catch (ClientHTTPException e) { + assertThat(e.getStatusCode()).isEqualTo(400); + assertThat(e.getMessage()).startsWith("MALFORMED DATA: Supplied repository configuration is invalid:"); + } + } + + @Test + public void put_errorHandling_InvalidConfig() throws Exception { + request.setMethod(HttpMethod.PUT.name()); + request.setContentType(RDFFormat.NTRIPLES.getDefaultMIMEType()); + request.setContent(("_:node1 <" + RepositoryConfigSchema.REPOSITORYID + "> \"" + repositoryId + "\" .") + .getBytes(StandardCharsets.UTF_8)); + doThrow(new RepositoryConfigException("stub invalid")).when(manager).addRepositoryConfig(Mockito.any()); + + try { + controller.handleRequest(request, response); + fail("expected exception"); + } catch (ClientHTTPException e) { + assertThat(e.getStatusCode()).isEqualTo(400); + assertThat(e.getMessage()) + .startsWith("MALFORMED DATA: Supplied repository configuration is invalid: stub invalid"); + } + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/TupleQueryResultViewTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/TupleQueryResultViewTest.java new file mode 100644 index 00000000000..28f2670450e --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/TupleQueryResultViewTest.java @@ -0,0 +1,47 @@ +/******************************************************************************* + * Copyright (c) 2023 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.util.HashMap; +import java.util.Map; + +import org.eclipse.rdf4j.query.QueryEvaluationException; +import org.eclipse.rdf4j.query.TupleQueryResult; +import org.eclipse.rdf4j.query.resultio.sparqljson.SPARQLResultsJSONWriterFactory; +import org.junit.jupiter.api.Test; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +public class TupleQueryResultViewTest { + + private static TupleQueryResultView view = TupleQueryResultView.getInstance(); + + @Test + public void testRender_QueryEvaluationError1() throws Exception { + var request = new MockHttpServletRequest(); + var response = new MockHttpServletResponse(); + + TupleQueryResult queryResult = mock(TupleQueryResult.class); + when(queryResult.hasNext()).thenThrow(QueryEvaluationException.class); + + Map model = new HashMap<>(); + model.put(TupleQueryResultView.FACTORY_KEY, new SPARQLResultsJSONWriterFactory()); + model.put(TupleQueryResultView.QUERY_RESULT_KEY, queryResult); + + view.render(model, request, response); + + assertThat(response.getStatus()).isEqualTo(500); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/config/ConfigControllerTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/config/ConfigControllerTest.java new file mode 100644 index 00000000000..61d1d47c9d2 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/config/ConfigControllerTest.java @@ -0,0 +1,90 @@ +/******************************************************************************* + * Copyright (c) 2019 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.nio.charset.StandardCharsets; + +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.repository.config.RepositoryConfig; +import org.eclipse.rdf4j.repository.config.RepositoryConfigSchema; +import org.eclipse.rdf4j.repository.config.RepositoryConfigUtil; +import org.eclipse.rdf4j.repository.manager.RepositoryManager; +import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.sail.memory.config.MemoryStoreConfig; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.ArgumentCaptor; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; +import org.springframework.web.servlet.ModelAndView; + +public class ConfigControllerTest { + + final String repositoryId = "test-config"; + final ConfigController controller = new ConfigController(); + + private MockHttpServletRequest request; + private MockHttpServletResponse response; + private RepositoryManager manager; + + @BeforeEach + public void setUp() { + request = new MockHttpServletRequest(); + request.setAttribute("repositoryID", repositoryId); + response = new MockHttpServletResponse(); + + manager = mock(RepositoryManager.class); + controller.setRepositoryManager(manager); + } + + @Test + public void getRequestRetrievesConfiguration() throws Exception { + request.setMethod(HttpMethod.GET.name()); + request.addHeader("Accept", RDFFormat.NTRIPLES.getDefaultMIMEType()); + + RepositoryConfig config = new RepositoryConfig(repositoryId, new SailRepositoryConfig(new MemoryStoreConfig())); + when(manager.getRepositoryConfig(repositoryId)).thenReturn(config); + + ModelAndView result = controller.handleRequest(request, response); + + verify(manager).getRepositoryConfig(repositoryId); + assertThat(result.getModel().containsKey(ConfigView.CONFIG_DATA_KEY)); + + Model resultData = (Model) result.getModel().get(ConfigView.CONFIG_DATA_KEY); + RepositoryConfig resultConfig = RepositoryConfigUtil.getRepositoryConfig(resultData, repositoryId); + assertThat(resultConfig).isNotNull(); + } + + @Test + public void postRequestModifiesConfiguration() throws Exception { + request.setMethod(HttpMethod.POST.name()); + request.setContentType(RDFFormat.NTRIPLES.getDefaultMIMEType()); + request.setContent( + ("_:node1 <" + RepositoryConfigSchema.REPOSITORYID + "> \"" + repositoryId + "\" .") + .getBytes(StandardCharsets.UTF_8)); + + when(manager.hasRepositoryConfig(repositoryId)).thenReturn(true); + + ArgumentCaptor config = ArgumentCaptor.forClass(RepositoryConfig.class); + + controller.handleRequest(request, new MockHttpServletResponse()); + + verify(manager).addRepositoryConfig(config.capture()); + assertThat(config.getValue().getID()).isEqualTo(repositoryId); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/config/ConfigViewTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/config/ConfigViewTest.java new file mode 100644 index 00000000000..545551982f1 --- 
/dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/config/ConfigViewTest.java @@ -0,0 +1,63 @@ +/******************************************************************************* + * Copyright (c) 2019 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.config; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.io.StringReader; +import java.util.LinkedHashMap; +import java.util.Map; + +import org.eclipse.rdf4j.model.Model; +import org.eclipse.rdf4j.model.impl.LinkedHashModelFactory; +import org.eclipse.rdf4j.model.vocabulary.RDF; +import org.eclipse.rdf4j.model.vocabulary.RDFS; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.rio.Rio; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +public class ConfigViewTest { + + @BeforeEach + public void setUp() { + } + + @Test + public void testRender() throws Exception { + + ConfigView configView = ConfigView.getInstance(); + + Model configData = new LinkedHashModelFactory().createEmptyModel(); + configData.add(RDF.ALT, RDF.TYPE, RDFS.CLASS); + + Map map = new LinkedHashMap<>(); + map.put(ConfigView.HEADERS_ONLY, false); + map.put(ConfigView.CONFIG_DATA_KEY, configData); + map.put(ConfigView.FORMAT_KEY, RDFFormat.NTRIPLES); + + final MockHttpServletRequest request = new MockHttpServletRequest(); + request.setMethod(HttpMethod.GET.name()); + request.addHeader("Accept", RDFFormat.NTRIPLES.getDefaultMIMEType()); + + MockHttpServletResponse response = new MockHttpServletResponse(); + + configView.render(map, request, response); + + String ntriplesData = response.getContentAsString(); + Model renderedData = Rio.parse(new StringReader(ntriplesData), "", RDFFormat.NTRIPLES); + assertThat(renderedData).isNotEmpty(); + } + +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespaceControllerTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespaceControllerTest.java new file mode 100644 index 00000000000..bcdfd3dd41f --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/namespaces/NamespaceControllerTest.java @@ -0,0 +1,134 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
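`ConfigViewTest` above asserts that the rendered response parses back into a non-empty model. The following is a stand-alone sketch of that same N-Triples round trip, kept separate from the servlet machinery; the single RDF.ALT statement simply mirrors the one the test adds.

```java
// Sketch: serialize a Model to N-Triples with Rio and parse it back.
import java.io.StringReader;
import java.io.StringWriter;

import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.Rio;

public class NTriplesRoundTripSketch {
	public static void main(String[] args) throws Exception {
		Model original = new LinkedHashModel();
		original.add(RDF.ALT, RDF.TYPE, RDFS.CLASS); // same statement the test uses

		StringWriter out = new StringWriter();
		Rio.write(original, out, RDFFormat.NTRIPLES); // serialize

		Model reparsed = Rio.parse(new StringReader(out.toString()), "", RDFFormat.NTRIPLES);
		System.out.println(reparsed.size()); // expected: 1
	}
}
```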
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.namespaces; + +import static java.nio.charset.StandardCharsets.UTF_8; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import org.eclipse.rdf4j.common.webapp.views.EmptySuccessView; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.memory.MemoryStore; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.EmptySource; +import org.junit.jupiter.params.provider.ValueSource; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; +import org.springframework.web.servlet.ModelAndView; + +class NamespaceControllerTest { + private final static String REPO_ID = "test-repo"; + private final NamespaceController controller = new NamespaceController(); + + private MockHttpServletRequest request; + private MockHttpServletResponse response; + + @BeforeEach + public void setUp() { + request = new MockHttpServletRequest(); + request.setAttribute("repositoryID", REPO_ID); + request.setAttribute("repository", new SailRepository(new MemoryStore())); + request.setMethod(HttpMethod.PUT.name()); + response = new MockHttpServletResponse(); + } + + @ParameterizedTest + @EmptySource + @ValueSource(strings = { "a", "rdf", "rdf4j", "wn-6", "t_1", "t_", "a2", "a.3" }) + void addNamespace_prefix_ok(String prefix) throws Exception { + // Arrange + request.setRequestURI("/repositories/" + REPO_ID + "/namespaces/" + prefix); + request.setPathInfo(REPO_ID + "/namespaces/" + prefix); + request.setContent("http://www.w3.org/1999/02/22-rdf-syntax-ns#".getBytes(UTF_8)); + + // Act + final ModelAndView result = controller.handleRequest(request, response); + + // Assert + assertThat(result).isNotNull(); + assertThat(result.getView()).isInstanceOf(EmptySuccessView.class); + } + + @ParameterizedTest + @ValueSource(strings = { " ", "\t", "\n", "-", "rdf 4j", "_", "_t", "2a", "a+a", "a*a", "a@a" }) + void addNamespace_prefix_invalid(String prefix) { + // Arrange + request.setRequestURI("/repositories/" + REPO_ID + "/namespaces/" + prefix); + request.setPathInfo(REPO_ID + "/namespaces/" + prefix); + request.setContent("http://www.w3.org/1999/02/22-rdf-syntax-ns#".getBytes(UTF_8)); + + // Act & Assert + assertThatThrownBy(() -> controller.handleRequest(request, response)).isInstanceOf(ClientHTTPException.class) + .hasMessageContaining("Prefix not valid"); + } + + @ParameterizedTest + @ValueSource(strings = { + "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "http://www.w3.org/2001/XMLSchema-instance", + "http://purl.org/dc/elements/1.1/", + "http://rdfs.org/ns/void#", + "https://rdfs.org/ns/void#", + "ftp://rdfs.org/ns/void", + "ftps://rdfs.org/ns/void", + "http://example.org/with%20whitespace", + "http://", + "tttt://tttt.ttt", + "t://tttt", + "t:tttt", + "t:" + }) + void addNamespace_namespaceUri_ok(String namespaceUrl) throws Exception { + // Arrange + request.setRequestURI("/repositories/" + REPO_ID + "/namespaces/rdf4j"); + request.setPathInfo(REPO_ID + "/namespaces/rdf4j"); + request.setContent(namespaceUrl.getBytes(UTF_8)); + + // Act + final ModelAndView result = 
controller.handleRequest(request, response); + + // Assert + assertThat(result).isNotNull(); + assertThat(result.getView()).isInstanceOf(EmptySuccessView.class); + } + + @ParameterizedTest + @EmptySource + @ValueSource(strings = { " ", "\t", "\n" }) + void addNamespace_namespaceUri_empty(String namespaceUrl) { + // Arrange + request.setRequestURI("/repositories/" + REPO_ID + "/namespaces/rdf4j"); + request.setPathInfo(REPO_ID + "/namespaces/rdf4j"); + request.setContent(namespaceUrl.getBytes(UTF_8)); + + // Act & Assert + assertThatThrownBy(() -> controller.handleRequest(request, response)).isInstanceOf(ClientHTTPException.class) + .hasMessageContaining("No namespace name found in request body"); + } + + @ParameterizedTest + @ValueSource(strings = { "wwww3org", "httpwwww3org", "www.rdf4j.org/ns/void", "t", ":", " :", "\n:\n", + "http://example.org/with whitespace" }) + void addNamespace_namespaceUri_invalid(String namespaceUrl) { + // Arrange + request.setRequestURI("/repositories/" + REPO_ID + "/namespaces/rdf4j"); + request.setPathInfo(REPO_ID + "/namespaces/rdf4j"); + request.setContent(namespaceUrl.getBytes(UTF_8)); + + // Act & Assert + assertThatThrownBy(() -> controller.handleRequest(request, response)).isInstanceOf(ClientHTTPException.class) + .hasMessageContaining("Namespace not valid"); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestExportStatementsView.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestExportStatementsView.java new file mode 100644 index 00000000000..0818a7f68dd --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestExportStatementsView.java @@ -0,0 +1,70 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.statements; + +import static jakarta.servlet.http.HttpServletResponse.SC_OK; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.util.Map; + +import org.eclipse.rdf4j.http.server.ServerHTTPException; +import org.eclipse.rdf4j.repository.RepositoryException; +import org.eclipse.rdf4j.rio.turtle.TurtleWriterFactory; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.http.HttpMethod; +import org.springframework.ui.ModelMap; + +public class TestExportStatementsView extends TestStatementsCommon { + public static final String REPOSITORY_ERROR_MSG = "Unable to get statements from Sail"; + private final ExportStatementsView exportStatementsView = ExportStatementsView.getInstance(); + private final Map model = new ModelMap(); + + @BeforeEach + public void initMocks() { + request.setMethod(HttpMethod.GET.name()); + model.put(ExportStatementsView.FACTORY_KEY, new TurtleWriterFactory()); + model.put(ExportStatementsView.USE_INFERENCING_KEY, false); + model.put(ExportStatementsView.HEADERS_ONLY, false); + + super.initMocks(); + } + + @Test + public void shouldReturnSC_OKIfNoExceptionIsThrown() throws Exception { + // act + exportStatementsView.render(model, request, response); + + assertEquals(SC_OK, response.getStatus()); + } + + @Test + public void shouldReturnSC_INTERNAL_SERVER_ERRORIfExceptionIsThrown() throws Exception { + Exception exception = null; + Mockito.doThrow(new RepositoryException(REPOSITORY_ERROR_MSG)) + .when(connectionMock) + .exportStatements(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(Boolean.class), + Mockito.notNull(), Mockito.any()); + + try { + // act + exportStatementsView.render(model, request, response); + } catch (ServerHTTPException ex) { + exception = ex; + } + + assertNotNull(exception); + assertTrue(exception.getMessage().contains(REPOSITORY_ERROR_MSG)); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestStatementsCommon.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestStatementsCommon.java new file mode 100644 index 00000000000..652415b982e --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestStatementsCommon.java @@ -0,0 +1,35 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.statements; + +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.rio.ParserConfig; +import org.junit.jupiter.api.BeforeEach; +import org.mockito.Mockito; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +public class TestStatementsCommon { + protected final MockHttpServletRequest request = new MockHttpServletRequest(); + protected final MockHttpServletResponse response = new MockHttpServletResponse(); + protected final Repository repMock = Mockito.mock(Repository.class); + protected final RepositoryConnection connectionMock = Mockito.mock(RepositoryConnection.class); + private final ParserConfig parserConfigMock = Mockito.mock(ParserConfig.class); + + @BeforeEach + public void initMocks() { + Mockito.when(repMock.getConnection()).thenReturn(connectionMock); + Mockito.when(connectionMock.getParserConfig()).thenReturn(parserConfigMock); + // repository interceptor uses this attribute + request.setAttribute("repository", repMock); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestStatementsController.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestStatementsController.java new file mode 100644 index 00000000000..a5e5120b129 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/statements/TestStatementsController.java @@ -0,0 +1,78 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + ******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.statements; + +import static org.junit.jupiter.api.Assertions.assertTrue; + +import java.nio.charset.StandardCharsets; + +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.query.QueryLanguage; +import org.eclipse.rdf4j.query.Update; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.http.HttpMethod; + +/** + * @author jeen + */ +public class TestStatementsController extends TestStatementsCommon { + + private final StatementsController controller = new StatementsController(); + + @BeforeEach + public void initMocks() { + request.setMethod(HttpMethod.POST.name()); + + super.initMocks(); + } + + @Test + public void shouldUseTimeoutParameterForUpdateQueries() throws Exception { + final int maxExecution = 1; + request.setContentType(Protocol.SPARQL_UPDATE_MIME_TYPE); + request.addParameter(Protocol.TIMEOUT_PARAM_NAME, String.valueOf(maxExecution)); + final String updateString = "delete where { ?p ?o . 
}"; + request.setContent(updateString.getBytes(StandardCharsets.UTF_8)); + + final Update updateMock = Mockito.mock(Update.class); + Mockito.when(connectionMock.prepareUpdate(QueryLanguage.SPARQL, updateString, null)).thenReturn(updateMock); + + // act + controller.handleRequest(request, response); + + Mockito.verify(updateMock).setMaxExecutionTime(maxExecution); + } + + @Test + public void shouldThrowDescriptiveErrorOnEmpryUpdateQueries_SparqlUpdateMimeType() { + request.setContentType(Protocol.SPARQL_UPDATE_MIME_TYPE); + request.addParameter(Protocol.UPDATE_PARAM_NAME, ""); + Exception exception = Assertions.assertThrows(ClientHTTPException.class, () -> { + controller.handleRequest(request, response); + }); + assertTrue(exception.getMessage().contains("Updates must be non-empty")); + } + + @Test + public void shouldThrowDescriptiveErrorOnEmptyUpdateQueries_FormMimeType() { + request.setContentType(Protocol.FORM_MIME_TYPE); + request.addParameter(Protocol.UPDATE_PARAM_NAME, ""); + Exception exception = Assertions.assertThrows(ClientHTTPException.class, () -> { + controller.handleRequest(request, response); + }); + assertTrue(exception.getMessage().contains("Updates must be non-empty")); + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestActiveTransactionRegistry.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestActiveTransactionRegistry.java new file mode 100644 index 00000000000..7df63bdc705 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestActiveTransactionRegistry.java @@ -0,0 +1,43 @@ +/******************************************************************************* + * Copyright (c) 2015 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. 
+ * + * SPDX-License-Identifier: BSD-3-Clause + ******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.transaction; + +import java.util.UUID; + +import org.eclipse.rdf4j.repository.Repository; +import org.junit.jupiter.api.BeforeEach; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestActiveTransactionRegistry { + + private static final Logger logger = LoggerFactory.getLogger(TestActiveTransactionRegistry.class); + + private ActiveTransactionRegistry registry; + + private Repository repository; + + private UUID txnId1; + + private UUID txnId2; + + /** + */ + @BeforeEach + public void setUp() { + System.setProperty(ActiveTransactionRegistry.CACHE_TIMEOUT_PROPERTY, "1"); + registry = ActiveTransactionRegistry.INSTANCE; + repository = Mockito.mock(Repository.class); + } + +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestBnodesUniquenessInTransactions.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestBnodesUniquenessInTransactions.java new file mode 100644 index 00000000000..3e368b49d36 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestBnodesUniquenessInTransactions.java @@ -0,0 +1,144 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.List; +import java.util.UUID; + +import org.eclipse.rdf4j.common.io.FileUtil; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.query.BindingSet; +import org.eclipse.rdf4j.query.QueryResults; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.RepositoryConnection; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.rio.RDFFormat; +import org.eclipse.rdf4j.sail.nativerdf.NativeStore; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +/** + * Test for bnodes uniqueness which checks whether by default blank nodes are added with unique identifiers to + * transactions or their identifiers are preserved by specifying a parameter for server instructions when parsing + * request data. 
+ * + * @author Denitsa Stoyanova + */ +public class TestBnodesUniquenessInTransactions { + + private MockHttpServletRequest request; + private MockHttpServletResponse response; + + private final String repositoryID = "test-repo"; + private File dataDir; + private Repository repository; + + private final String query = "select * where { \n" + + "\t?s ?p ?o .\n" + + "}"; + + @BeforeEach + public void setUp() throws IOException { + dataDir = Files.createTempDirectory(repositoryID).toFile(); + + repository = new SailRepository(new NativeStore(dataDir)); + repository.init(); + + request = new MockHttpServletRequest(); + response = new MockHttpServletResponse(); + } + + @AfterEach + public void tearDown() throws Exception { + repository.shutDown(); + FileUtil.deleteDir(dataDir); + } + + @Test + public void shouldImportUniqueBnodes() throws Exception { + executeTransactionAction(" _:c ."); + executeTransactionAction(" _:c ."); + + try (RepositoryConnection connection = repository.getConnection()) { + List<BindingSet> result = QueryResults.asList(connection.prepareTupleQuery(query).evaluate()); + + Assertions.assertNotEquals(result.get(0).getValue("o").stringValue(), + result.get(1).getValue("o").stringValue()); + } + } + + @Test + public void shouldImportUniqueBnodesWithRequestParam() throws Exception { + request.setParameter(Protocol.PRESERVE_BNODE_ID_PARAM_NAME, "false"); + + executeTransactionAction(" _:c ."); + executeTransactionAction(" _:c ."); + + try (RepositoryConnection connection = repository.getConnection()) { + List<BindingSet> result = QueryResults.asList(connection.prepareTupleQuery(query).evaluate()); + + Assertions.assertNotEquals(result.get(0).getValue("o").stringValue(), + result.get(1).getValue("o").stringValue()); + } + } + + @Test + public void shouldImportPreservedBnodes() throws Exception { + request.setParameter(Protocol.PRESERVE_BNODE_ID_PARAM_NAME, "true"); + + executeTransactionAction(" _:node ."); + executeTransactionAction(" _:node ."); + + try (RepositoryConnection connection = repository.getConnection()) { + List<BindingSet> result = QueryResults.asList(connection.prepareTupleQuery(query).evaluate()); + + Assertions.assertEquals("node", result.get(0).getValue("o").stringValue()); + Assertions.assertEquals("node", result.get(1).getValue("o").stringValue()); + } + } + + /** + * Start a new transaction and add data to it using the default settings of the parser config. + * + * @param data the content of the request, i.e. the data to be added to the repository + */ + private void executeTransactionAction(String data) throws Exception { + Transaction txn = new Transaction(repository); + ActiveTransactionRegistry.INSTANCE.register(txn); + + final UUID transactionId = txn.getID(); + + request.setRequestURI("/repositories/" + repositoryID + "/transactions/" + transactionId); + request.setPathInfo(repositoryID + "/transactions/" + transactionId); + request.setMethod(HttpMethod.PUT.name()); + request.setParameter(Protocol.ACTION_PARAM_NAME, "ADD"); + request.setContent(data.getBytes(StandardCharsets.UTF_8)); + request.setContentType(RDFFormat.TURTLE.getDefaultMIMEType()); + + TransactionController transactionController = new TransactionController(); + + response = new MockHttpServletResponse(); + transactionController.handleRequestInternal(request, response); + + txn.close(); + ActiveTransactionRegistry.INSTANCE.deregister(txn); + } + +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestTransactionControllerErrorHandling.java
b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestTransactionControllerErrorHandling.java new file mode 100644 index 00000000000..2d40145c702 --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TestTransactionControllerErrorHandling.java @@ -0,0 +1,104 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ + +package org.eclipse.rdf4j.http.server.repository.transaction; + +import static org.junit.jupiter.api.Assertions.assertThrows; + +import java.io.File; +import java.io.IOException; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.util.UUID; + +import org.eclipse.rdf4j.common.io.FileUtil; +import org.eclipse.rdf4j.http.protocol.Protocol; +import org.eclipse.rdf4j.http.server.ClientHTTPException; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.nativerdf.NativeStore; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; + +/** + * Test which checks whether the correct exception is thrown when some query is processed through + * {@link TransactionController TransactionController} + * + * @author Denitsa Stoyanova + */ +public class TestTransactionControllerErrorHandling { + private MockHttpServletRequest request; + private MockHttpServletResponse response; + private final String repositoryID = "test-repo"; + private File dataDir; + private Repository repository; + + @BeforeEach + public void setUp() throws IOException { + dataDir = Files.createTempDirectory(repositoryID).toFile(); + + repository = new SailRepository(new NativeStore(dataDir)); + repository.init(); + + request = new MockHttpServletRequest(); + response = new MockHttpServletResponse(); + } + + @AfterEach + public void tearDown() throws Exception { + repository.shutDown(); + FileUtil.deleteDir(dataDir); + } + + @Test + public void shouldThrowMalformedQueryExceptionForQuerySyntaxErrors() throws Exception { + String testQuery = "#PREFIX ex: \n" + + "select * where { \n" + + "\t?s ex:data ?o .\n" + + "}"; + + Transaction txn = new Transaction(repository); + ActiveTransactionRegistry.INSTANCE.register(txn); + + final UUID transactionId = txn.getID(); + + request.setRequestURI("/repositories/" + repositoryID + "/transactions/" + transactionId); + request.setPathInfo(repositoryID + "/transactions/" + transactionId); + request.setMethod(HttpMethod.PUT.name()); + request.setParameter(Protocol.ACTION_PARAM_NAME, "QUERY"); + request.setContentType("application/sparql-query; charset=utf-8"); + request.setContent(testQuery.getBytes(StandardCharsets.UTF_8)); + + TransactionController transactionController = new TransactionController(); + + response = new MockHttpServletResponse(); 
+ + assertThrows(ClientHTTPException.class, () -> { + try { + transactionController.handleRequestInternal(request, response); + } catch (ClientHTTPException e) { + Assertions + .assertEquals("MALFORMED QUERY: org.eclipse.rdf4j.query.parser.sparql.ast.VisitorException: " + + "QName 'ex:data' uses an undefined prefix", e.getMessage()); + throw e; + } finally { + txn.close(); + ActiveTransactionRegistry.INSTANCE.deregister(txn); + } + }); + + } +} diff --git a/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionStartControllerTest.java b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionStartControllerTest.java new file mode 100644 index 00000000000..afd2004940c --- /dev/null +++ b/tools/server-spring6/src/test/java/org/eclipse/rdf4j/http/server/repository/transaction/TransactionStartControllerTest.java @@ -0,0 +1,169 @@ +/******************************************************************************* + * Copyright (c) 2022 Eclipse RDF4J contributors. + * + * All rights reserved. This program and the accompanying materials + * are made available under the terms of the Eclipse Distribution License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/org/documents/edl-v10.php. + * + * SPDX-License-Identifier: BSD-3-Clause + *******************************************************************************/ +package org.eclipse.rdf4j.http.server.repository.transaction; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatIllegalArgumentException; +import static org.assertj.core.api.Assertions.fail; +import static org.eclipse.rdf4j.common.transaction.IsolationLevels.READ_COMMITTED; +import static org.eclipse.rdf4j.common.transaction.IsolationLevels.SNAPSHOT; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +import java.util.HashMap; +import java.util.UUID; + +import org.eclipse.rdf4j.common.transaction.IsolationLevel; +import org.eclipse.rdf4j.common.transaction.IsolationLevels; +import org.eclipse.rdf4j.http.server.repository.RepositoryInterceptor; +import org.eclipse.rdf4j.repository.Repository; +import org.eclipse.rdf4j.repository.sail.SailRepository; +import org.eclipse.rdf4j.sail.memory.MemoryStore; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.springframework.http.HttpMethod; +import org.springframework.mock.web.MockHttpServletRequest; +import org.springframework.mock.web.MockHttpServletResponse; +import org.springframework.web.servlet.ModelAndView; + +class TransactionStartControllerTest { + private final static String REPO_ID = "test-repo"; + private final TransactionStartController controller = new TransactionStartController(); + + private MockHttpServletRequest request; + private MockHttpServletResponse response; + + @BeforeEach + public void setUp() { + request = new MockHttpServletRequest(); + request.setRequestURI("/repositories/" + REPO_ID + "/transactions"); + request.setAttribute("repositoryID", REPO_ID); + request.setAttribute("repository", new SailRepository(new MemoryStore())); + request.setMethod(HttpMethod.POST.name()); + response = new MockHttpServletResponse(); + } + + @Test + void createTransactionLocation_default() throws Exception { + // Arrange + controller.setExternalUrl(null); + + // Act + final ModelAndView result 
= controller.handleRequest(request, response); + + // Assert + assertThat(getHeaders(result).get("Location")) + .startsWith("http://localhost/repositories/test-repo/transactions/"); + } + + @Test + void createTransactionLocation_overrideExternalUrl() throws Exception { + // Arrange + controller.setExternalUrl("https://external-url.com/subpath/"); + + // Act + final ModelAndView result = controller.handleRequest(request, response); + + // Assert + assertThat(getHeaders(result).get("Location")).startsWith( + "https://external-url.com/subpath/repositories/test-repo/transactions/"); + } + + @Test + void createTransactionLocation_overrideExternalUrl_withoutEndingSlash() throws Exception { + // Arrange + controller.setExternalUrl("https://external-url.com/subpath"); + + // Act + final ModelAndView result = controller.handleRequest(request, response); + + // Assert + assertThat(getHeaders(result).get("Location")).startsWith( + "https://external-url.com/subpath/repositories/test-repo/transactions/"); + } + + private HashMap<String, String> getHeaders(final ModelAndView result) { + if (result == null) { + return fail("Result is null"); + } + + return (HashMap<String, String>) result.getModel().get("headers"); + } + + @Test + void positiveIsolationEnumsOldPath() { + for (IsolationLevel level : IsolationLevels.values()) { + request.setParameter("isolation-level", level.toString()); + + assertThat(controller.getIsolationLevel(request).size()).isEqualTo(1); + } + + } + + @Test + void negativeIsolationEnumsOldPath() { + request.addParameter("isolation-level", "GARBAGE"); + assertThatIllegalArgumentException().isThrownBy(() -> controller.getIsolationLevel(request)); + } + + @Test + void createTransactionLocation_withPositiveIsolationOldPath() throws Exception { + TransactionStartController controller = spy(TransactionStartController.class); + Transaction tx = mock(Transaction.class); + // Arrange + controller.setExternalUrl(null); + + request.addParameter("isolation-level", "SNAPSHOT"); + Repository repository = RepositoryInterceptor.getRepository(request); + + when(controller.createTransaction(repository)).thenReturn(tx); + when(tx.getID()).thenReturn(UUID.randomUUID()); + // Act + controller.handleRequest(request, response); + verify(tx).begin(SNAPSHOT); + } + + @Test + void createTransactionLocation_withIsolationOldPath() throws Exception { + TransactionStartController controller = spy(TransactionStartController.class); + Transaction tx = mock(Transaction.class); + // Arrange + controller.setExternalUrl(null); + + request.addParameter("isolation-level", "http://www.openrdf.org/schema/sesame#READ_COMMITTED"); + Repository repository = RepositoryInterceptor.getRepository(request); + + when(controller.createTransaction(repository)).thenReturn(tx); + when(tx.getID()).thenReturn(UUID.randomUUID()); + // Act + controller.handleRequest(request, response); + verify(tx).begin(READ_COMMITTED); + } + + @Test + void createTransactionLocation_withNegativeIsolationOldPath() throws Exception { + + TransactionStartController controller = spy(TransactionStartController.class); + Transaction tx = mock(Transaction.class); + controller.setExternalUrl(null); + + request.addParameter("isolation-level", "GARBAGE"); + Repository repository = RepositoryInterceptor.getRepository(request); + + when(controller.createTransaction(repository)).thenReturn(tx); + when(tx.getID()).thenReturn(UUID.randomUUID()); + + assertThatIllegalArgumentException().isThrownBy(() -> controller.handleRequest(request, response)); + } + +} diff --git
a/tools/server-spring6/src/test/resources/logback-test.xml b/tools/server-spring6/src/test/resources/logback-test.xml new file mode 100644 index 00000000000..64b3764879e --- /dev/null +++ b/tools/server-spring6/src/test/resources/logback-test.xml @@ -0,0 +1,12 @@ + + + + + %d{HH:mm:ss.SSS} [%thread] %-5level %msg%n + + + + + + + diff --git a/tools/server-spring6/src/test/resources/navigation.xml b/tools/server-spring6/src/test/resources/navigation.xml new file mode 100644 index 00000000000..2a5ebc915be --- /dev/null +++ b/tools/server-spring6/src/test/resources/navigation.xml @@ -0,0 +1,24 @@ + + + + + + / + .view + + /images/icons/ + _ + .png + + + . + .title + + + + + + +
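Side note on the blank-node tests in `TestBnodesUniquenessInTransactions`: the `preserve-bnode-ids` request parameter they exercise corresponds, assuming the server simply forwards it into the Rio parser configuration, to the `BasicParserSettings.PRESERVE_BNODE_IDS` setting. The snippet below is a minimal stand-alone sketch, not part of this patch (the class name and example IRIs are made up), showing that setting in isolation with plain Rio parsing:

```java
import java.io.StringReader;

import org.eclipse.rdf4j.model.Model;
import org.eclipse.rdf4j.model.impl.LinkedHashModel;
import org.eclipse.rdf4j.rio.RDFFormat;
import org.eclipse.rdf4j.rio.RDFParser;
import org.eclipse.rdf4j.rio.Rio;
import org.eclipse.rdf4j.rio.helpers.BasicParserSettings;
import org.eclipse.rdf4j.rio.helpers.StatementCollector;

// Illustrative only: demonstrates the parser-level effect of PRESERVE_BNODE_IDS.
public class PreserveBnodeIdsSketch {

	public static void main(String[] args) throws Exception {
		String turtle = "<urn:example:s> <urn:example:p> _:node .";

		// Default behaviour: each parse run mints fresh, unique blank node IDs,
		// so parsing the same document twice yields different bnode identifiers.
		Model first = parse(turtle, false);
		Model second = parse(turtle, false);
		System.out.println("bnodes differ across runs: " + !first.objects().equals(second.objects()));

		// With PRESERVE_BNODE_IDS enabled, the identifier from the source ("node") is kept.
		Model preserved = parse(turtle, true);
		System.out.println("preserved bnode: " + preserved.objects());
	}

	private static Model parse(String turtle, boolean preserveBnodeIds) throws Exception {
		RDFParser parser = Rio.createParser(RDFFormat.TURTLE);
		parser.getParserConfig().set(BasicParserSettings.PRESERVE_BNODE_IDS, preserveBnodeIds);
		Model model = new LinkedHashModel();
		parser.setRDFHandler(new StatementCollector(model));
		parser.parse(new StringReader(turtle), "");
		return model;
	}
}
```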