1
0
mirror of https://github.com/sismics/docs.git synced 2025-12-15 02:36:24 +00:00

55 Commits
v1.9 ... v1.11

Author SHA1 Message Date
bgamard
59597e962d 1.11 2023-03-12 13:58:03 +01:00
bgamard
c85a951a9e upgrade base image 2023-03-12 13:52:30 +01:00
bgamard
7f47a17633 upgrade jetty 2023-03-12 13:45:36 +01:00
bgamard
690c961a55 Merge remote-tracking branch 'origin/master' 2023-03-12 13:35:51 +01:00
bgamard
21efd1e4a7 Closes #658 2023-03-12 13:35:35 +01:00
@RandyMcMillan
ad27228429 docker-compose.yml: add example config (#665) 2023-02-20 11:51:39 +01:00
@RandyMcMillan
dd4a1667ca .gitignore: add docs/.gitkeep (#664) 2023-02-20 11:51:30 +01:00
@RandyMcMillan
399d2b7951 minor grammar corrections (#663) 2023-02-19 21:31:30 +01:00
bgamard
d51dfd6636 #647: fix doc 2022-08-26 18:18:06 +02:00
bgamard
ca85c1fa9f #647: always return OK on password lost route 2022-08-26 18:15:49 +02:00
bgamard
5e7f06070e keep filename in temporary file 2022-05-16 19:22:54 +02:00
bgamard
dc0c20cd0c moved tests 2022-05-16 18:53:08 +02:00
bgamard
98aa33341a moved tests 2022-05-16 18:50:19 +02:00
bgamard
1f7c0afc1e Closes #639: rework mime type resolution using java api 2022-05-16 18:44:26 +02:00
bgamard
1ccce3f942 rename 2022-05-05 18:15:24 +02:00
Uli
90d5bc8de7 Allow the . (dot) and @ (at) character in usernames (#637)
Co-authored-by: Uli Koeth <uli@kiot.eu>
2022-05-05 17:48:45 +02:00
bgamard
c6a685d7c0 Closes #620: delete a non-existing document should return 404 2022-04-17 13:35:29 +02:00
bgamard
e6cfd899e5 Closes #632: validate POST /app/config_inbox and update documentation 2022-04-17 13:23:22 +02:00
Julien Kirch
bd23f14792 Add doc for search syntax (#634) 2022-04-17 13:10:01 +02:00
Julien Kirch
46f6b9e537 Download zip of files not in same document (#591) 2022-04-15 10:18:39 +02:00
Julien Kirch
d5832c48e1 Small code cleaning 2022-03-21 11:36:25 +01:00
Julien Kirch
64ec0f63ca Add parameter to return the files when searching for a document (#582) 2022-03-20 11:36:28 +01:00
Ben Grabham
0b7c42e814 Check if environment variables are not empty strings as well as not null (#623) 2022-02-20 15:48:37 +01:00
bgamard
d8dc63fc98 Merge remote-tracking branch 'origin/master' 2022-02-02 21:18:06 +01:00
bgamard
81a7f154c2 logs only for admin 2022-02-02 21:17:58 +01:00
StaryVena
af3263d471 Add OCR support for Czech language (#613)
Co-authored-by: Vaclav Uher <vaclav.uher@bruker.com>
2022-01-26 15:27:14 +01:00
Dan Schaper
bbe5f19997 Tag latest on master, tag version on github tag. (#612)
Signed-off-by: Dan Schaper <dan.schaper@pi-hole.net>
2022-01-25 10:37:47 +01:00
Benjamin Gamard
f33650c099 fix action 2022-01-21 13:51:16 +01:00
Benjamin Gamard
58f81ec851 fix action 2022-01-21 13:37:31 +01:00
Dan Schaper
c9262eb204 Add build tags and labels (#608)
Fixes Docker images always build as 'latest' #607

Signed-off-by: Dan Schaper <dan.schaper@pi-hole.net>
2022-01-21 13:35:39 +01:00
bgamard
3637b832e5 test the new mime type detection 2022-01-17 14:37:22 +01:00
Joost Timmerman
ee56cfe2b4 Support audio mime (#574) 2022-01-17 14:24:50 +01:00
bgamard
721410c7d0 add test dependencies 2022-01-13 00:15:37 +01:00
bgamard
f0310e3933 add test dependencies 2022-01-13 00:06:29 +01:00
bgamard
302d7cccc4 run tests + fix docker username 2022-01-12 23:59:43 +01:00
Dan Schaper
f9977d5ce6 Actions workflow (#601)
Signed-off-by: Dan Schaper <dan@glacialmagma.com>
2022-01-12 23:49:34 +01:00
bgamard
0a927fd320 add application/x-www-form-urlencoded to delete requests 2022-01-02 16:46:20 +01:00
bgamard
523501a592 consumes application/x-www-form-urlencoded 2022-01-02 16:40:01 +01:00
bgamard
ff8155be6a upgrade docker image to use jetty 9.4.36 2022-01-02 16:06:36 +01:00
bgamard
6c5d697051 Merge remote-tracking branch 'origin/master' 2022-01-02 15:39:11 +01:00
bgamard
b19145160e release 1.10 2022-01-02 15:39:00 +01:00
Roland Illig
c7ada71ef5 proofread German translation (#566)
* plural forms
* spelling of composed words
* spaces between numbers and measurement units
* typographic ellipsis (\u2026) instead of three dots
2021-11-20 20:34:36 +01:00
bgamard
4951229576 escape ngTranslate parameters 2021-11-16 20:01:36 +01:00
Julien Kirch
d98c1bddec Add custom parameter for exact search by title 2021-10-12 13:50:32 +02:00
Dan Schaper
b0d0e93364 Remove duplicate tesseact language and alphabetize (#579)
Signed-off-by: Dan Schaper <dan@glacialmagma.com>
2021-09-30 13:23:58 +02:00
Benjamin Gamard
f20a562439 remove form url encoded from baseresource 2021-08-20 10:45:08 +02:00
Hung Nguyen
4ae8475f5e Add Vietnamese language support (#549) 2021-06-21 10:51:31 +02:00
Benjamin Gamard
fd4c627c61 remove travis 2021-05-12 19:38:58 +02:00
Benjamin Gamard
a867d48232 remove travis 2021-05-12 19:38:45 +02:00
Somebodyisnobody
f6bf61fce9 Update de.json (#532)
Fix typo
2021-03-31 19:08:58 +02:00
bgamard
c60c9a8f74 Merge remote-tracking branch 'origin/master' 2021-02-12 21:54:33 +01:00
bgamard
dc021ab71e Closes #520: downgrade H2 to 1.4.199 2021-02-12 21:54:25 +01:00
Pascal Pischel
18b5551f6c Fix german translation 2021-02-12 21:48:57 +01:00
bgamard
6fcd8771a5 upgrade to java 11 + upgrade libraries 2021-01-25 22:40:58 +01:00
bgamard
1fef4c3d2e next dev iteration + cleanup stress project 2021-01-25 21:31:14 +01:00
87 changed files with 1124 additions and 1142 deletions

84
.github/workflows/build-deploy.yml vendored Normal file
View File

@@ -0,0 +1,84 @@
name: Maven CI/CD
on:
push:
branches: [master]
tags: [v*]
workflow_dispatch:
jobs:
build_and_publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v2
with:
java-version: "11"
distribution: "temurin"
cache: maven
- name: Install test dependencies
run: sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
- name: Build with Maven
run: mvn -Pprod clean install
- name: Upload war artifact
uses: actions/upload-artifact@v2
with:
name: docs-web-ci.war
path: docs-web/target/docs*.war
build_docker_image:
name: Publish to Docker Hub
runs-on: ubuntu-latest
needs: [build_and_publish]
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Download war artifact
uses: actions/download-artifact@v2
with:
name: docs-web-ci.war
path: docs-web/target
-
name: Setup up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Populate Docker metadata
id: metadata
uses: docker/metadata-action@v3
with:
images: sismics/docs
flavor: |
latest=false
tags: |
type=ref,event=tag
type=raw,value=latest,enable=${{ github.ref_type != 'tag' }}
labels: |
org.opencontainers.image.title = Teedy
org.opencontainers.image.description = Teedy is an open source, lightweight document management system for individuals and businesses.
org.opencontainers.image.created = ${{ github.event_created_at }}
org.opencontainers.image.author = Sismics
org.opencontainers.image.url = https://teedy.io/
org.opencontainers.image.vendor = Sismics
org.opencontainers.image.license = GPLv2
org.opencontainers.image.version = ${{ github.event_head_commit.id }}
-
name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}

5
.gitignore vendored
View File

@@ -14,3 +14,8 @@ import_test
teedy-importer-linux teedy-importer-linux
teedy-importer-macos teedy-importer-macos
teedy-importer-win.exe teedy-importer-win.exe
docs/*
!docs/.gitkeep
#macos
.DS_Store

View File

@@ -1,33 +0,0 @@
sudo: required
dist: trusty
language: java
before_install:
- sudo add-apt-repository -y ppa:mc3man/trusty-media
- sudo apt-get -qq update
- sudo apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb tesseract-ocr-hun tesseract-ocr-fin tesseract-ocr-swe tesseract-ocr-lav tesseract-ocr-dan tesseract-ocr-nor
- sudo apt-get -y -q install haveged && sudo service haveged start
after_success:
- |
if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
mvn -Pprod -DskipTests clean install
docker login -u $DOCKER_USER -p $DOCKER_PASS
export REPO=sismics/docs
export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
docker build -f Dockerfile -t $REPO:$COMMIT .
docker tag $REPO:$COMMIT $REPO:$TAG
docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
docker push $REPO
cd docs-importer
export REPO=sismics/docs-importer
export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
docker build -f Dockerfile -t $REPO:$COMMIT .
docker tag $REPO:$COMMIT $REPO:$TAG
docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
docker push $REPO
fi
env:
global:
- secure: LRGpjWORb0qy6VuypZjTAfA8uRHlFUMTwb77cenS9PPRBxuSnctC531asS9Xg3DqC5nsRxBBprgfCKotn5S8nBSD1ceHh84NASyzLSBft3xSMbg7f/2i7MQ+pGVwLncusBU6E/drnMFwZBleo+9M8Tf96axY5zuUp90MUTpSgt0=
- secure: bCDDR6+I7PmSkuTYZv1HF/z98ANX/SFEESUCqxVmV5Gs0zFC0vQXaPJQ2xaJNRop1HZBFMZLeMMPleb0iOs985smpvK2F6Rbop9Tu+Vyo0uKqv9tbZ7F8Nfgnv9suHKZlL84FNeUQZJX6vsFIYPEJ/r7K5P/M0PdUy++fEwxEhU=
- secure: ewXnzbkgCIHpDWtaWGMa1OYZJ/ki99zcIl4jcDPIC0eB3njX/WgfcC6i0Ke9mLqDqwXarWJ6helm22sNh+xtQiz6isfBtBX+novfRt9AANrBe3koCMUemMDy7oh5VflBaFNP0DVb8LSCnwf6dx6ZB5E9EB8knvk40quc/cXpGjY=
- COMMIT=${TRAVIS_COMMIT::8}

View File

@@ -1,7 +1,37 @@
FROM sismics/ubuntu-jetty:9.4.12-2 FROM sismics/ubuntu-jetty:9.4.51
MAINTAINER b.gamard@sismics.com LABEL maintainer="b.gamard@sismics.com"
RUN apt-get update && apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb tesseract-ocr-hun tesseract-ocr-fin tesseract-ocr-swe tesseract-ocr-lav tesseract-ocr-dan tesseract-ocr-nor && \ RUN apt-get update && \
apt-get -y -q --no-install-recommends install \
ffmpeg \
mediainfo \
tesseract-ocr \
tesseract-ocr-ara \
tesseract-ocr-ces \
tesseract-ocr-chi-sim \
tesseract-ocr-chi-tra \
tesseract-ocr-dan \
tesseract-ocr-deu \
tesseract-ocr-fin \
tesseract-ocr-fra \
tesseract-ocr-heb \
tesseract-ocr-hin \
tesseract-ocr-hun \
tesseract-ocr-ita \
tesseract-ocr-jpn \
tesseract-ocr-kor \
tesseract-ocr-lav \
tesseract-ocr-nld \
tesseract-ocr-nor \
tesseract-ocr-pol \
tesseract-ocr-por \
tesseract-ocr-rus \
tesseract-ocr-spa \
tesseract-ocr-swe \
tesseract-ocr-tha \
tesseract-ocr-tur \
tesseract-ocr-ukr \
tesseract-ocr-vie && \
apt-get clean && rm -rf /var/lib/apt/lists/* apt-get clean && rm -rf /var/lib/apt/lists/*
# Remove the embedded javax.mail jar from Jetty # Remove the embedded javax.mail jar from Jetty

View File

@@ -3,7 +3,6 @@
</h3> </h3>
[![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html) [![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
[![Build Status](https://secure.travis-ci.org/sismics/docs.png)](http://travis-ci.org/sismics/docs)
Teedy is an open source, lightweight document management system for individuals and businesses. Teedy is an open source, lightweight document management system for individuals and businesses.
@@ -15,8 +14,7 @@ Teedy is an open source, lightweight document management system for individuals
![New!](https://teedy.io/img/laptop-demo.png?20180301) ![New!](https://teedy.io/img/laptop-demo.png?20180301)
Demo # Demo
----
A demo is available at [demo.teedy.io](https://demo.teedy.io) A demo is available at [demo.teedy.io](https://demo.teedy.io)
@@ -24,8 +22,7 @@ A demo is available at [demo.teedy.io](https://demo.teedy.io)
- "admin" login with "admin" password - "admin" login with "admin" password
- "demo" login with "password" password - "demo" login with "password" password
Features # Features
--------
- Responsive user interface - Responsive user interface
- Optical character recognition - Optical character recognition
@@ -55,21 +52,20 @@ Features
- [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode) - [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
- Tested to one million documents - Tested to one million documents
Install with Docker # Install with Docker
-------------------
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance. A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance.
**The default admin password is "admin". Don't forget to change it before going to production.** **The default admin password is "admin". Don't forget to change it before going to production.**
- Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest` - Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
- Latest stable version: `sismics/docs:v1.8` - Latest stable version: `sismics/docs:v1.11`
The data directory is `/data`. Don't forget to mount a volume on it. The data directory is `/data`. Don't forget to mount a volume on it.
To build external URL, the server is expecting a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com) To build external URL, the server is expecting a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com)
### Available environment variables ## Available environment variables
- General - General
- `DOCS_BASE_URL`: The base url used by the application. Generated url's will be using this as base. - `DOCS_BASE_URL`: The base url used by the application. Generated url's will be using this as base.
@@ -95,18 +91,18 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
- `DOCS_SMTP_USERNAME`: The username to be used. - `DOCS_SMTP_USERNAME`: The username to be used.
- `DOCS_SMTP_PASSWORD`: The password to be used. - `DOCS_SMTP_PASSWORD`: The password to be used.
### Examples ## Examples
In the following examples some passwords are exposed in cleartext. This was done in order to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to securely store your passwords. In the following examples some passwords are exposed in cleartext. This was done in order to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to securely store your passwords.
#### Using the internal database ### Using the internal database
```yaml ```yaml
version: '3' version: '3'
services: services:
# Teedy Application # Teedy Application
teedy-server: teedy-server:
image: sismics/docs:v1.8 image: sismics/docs:v1.11
restart: unless-stopped restart: unless-stopped
ports: ports:
# Map internal port to host # Map internal port to host
@@ -122,14 +118,14 @@ services:
- ./docs/data:/data - ./docs/data:/data
``` ```
#### Using PostgreSQL ### Using PostgreSQL
```yaml ```yaml
version: '3' version: '3'
services: services:
# Teedy Application # Teedy Application
teedy-server: teedy-server:
image: sismics/docs:v1.8 image: sismics/docs:v1.11
restart: unless-stopped restart: unless-stopped
ports: ports:
# Map internal port to host # Map internal port to host
@@ -180,26 +176,24 @@ networks:
driver: bridge driver: bridge
``` ```
Manual installation # Manual installation
-------------------
#### Requirements ## Requirements
- Java 8 with the [Java Cryptography Extension](http://www.oracle.com/technetwork/java/javase/downloads/jce-7-download-432124.html) - Java 11
- Tesseract 3 or 4 for OCR - Tesseract 4 for OCR
- ffmpeg for video thumbnails - ffmpeg for video thumbnails
- mediainfo for video metadata extraction - mediainfo for video metadata extraction
- A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/) - A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/)
#### Download ## Download
The latest release is downloadable here: <https://github.com/sismics/docs/releases> in WAR format. The latest release is downloadable here: <https://github.com/sismics/docs/releases> in WAR format.
**The default admin password is "admin". Don't forget to change it before going to production.** **The default admin password is "admin". Don't forget to change it before going to production.**
How to build Teedy from the sources ## How to build Teedy from the sources
----------------------------------
Prerequisites: JDK 8 with JCE, Maven 3, NPM, Grunt, Tesseract 3 or 4 Prerequisites: JDK 11, Maven 3, NPM, Grunt, Tesseract 4
Teedy is organized in several Maven modules: Teedy is organized in several Maven modules:
@@ -210,35 +204,39 @@ Teedy is organized in several Maven modules:
First off, clone the repository: `git clone git://github.com/sismics/docs.git` First off, clone the repository: `git clone git://github.com/sismics/docs.git`
or download the sources from GitHub. or download the sources from GitHub.
#### Launch the build ### Launch the build
From the root directory: From the root directory:
mvn clean -DskipTests install ```console
mvn clean -DskipTests install
```
#### Run a stand-alone version ### Run a stand-alone version
From the `docs-web` directory: From the `docs-web` directory:
mvn jetty:run ```console
mvn jetty:run
```
#### Build a .war to deploy to your servlet container ### Build a .war to deploy to your servlet container
From the `docs-web` directory: From the `docs-web` directory:
mvn -Pprod -DskipTests clean install ```console
mvn -Pprod -DskipTests clean install
```
You will get your deployable WAR in the `docs-web/target` directory. You will get your deployable WAR in the `docs-web/target` directory.
Contributing # Contributing
------------
All contributions are more than welcomed. Contributions may close an issue, fix a bug (reported or not reported), improve the existing code, add new feature, and so on. All contributions are more than welcomed. Contributions may close an issue, fix a bug (reported or not reported), improve the existing code, add new feature, and so on.
The `master` branch is the default and base branch for the project. It is used for development and all Pull Requests should go there. The `master` branch is the default and base branch for the project. It is used for development and all Pull Requests should go there.
License # License
-------
Teedy is released under the terms of the GPL license. See `COPYING` for more Teedy is released under the terms of the GPL license. See `COPYING` for more
information or see <http://opensource.org/licenses/GPL-2.0>. information or see <http://opensource.org/licenses/GPL-2.0>.

18
docker-compose.yml Normal file
View File

@@ -0,0 +1,18 @@
version: '3'
services:
# Teedy Application
teedy-server:
image: sismics/docs:v1.10
restart: unless-stopped
ports:
# Map internal port to host
- 8080:8080
environment:
# Base url to be used
DOCS_BASE_URL: "https://docs.example.com"
# Set the admin email
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
# Set the admin password (in this example: "superSecure")
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
volumes:
- ./docs/data:/data

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.sismics.docs</groupId> <groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId> <artifactId>docs-parent</artifactId>
<version>1.9</version> <version>1.11</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
@@ -95,7 +95,6 @@
<dependency> <dependency>
<groupId>at.favre.lib</groupId> <groupId>at.favre.lib</groupId>
<artifactId>bcrypt</artifactId> <artifactId>bcrypt</artifactId>
<version>0.9.0</version>
</dependency> </dependency>
<dependency> <dependency>

View File

@@ -43,7 +43,7 @@ public class Constants {
/** /**
* Supported document languages. * Supported document languages.
*/ */
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor"); public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces");
/** /**
* Base URL environment variable. * Base URL environment variable.

View File

@@ -10,6 +10,7 @@ import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager; import javax.persistence.EntityManager;
import javax.persistence.NoResultException; import javax.persistence.NoResultException;
import javax.persistence.Query; import javax.persistence.Query;
import javax.persistence.TypedQuery;
import java.sql.Timestamp; import java.sql.Timestamp;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@@ -50,10 +51,9 @@ public class DocumentDao {
* @param limit Limit * @param limit Limit
* @return List of documents * @return List of documents
*/ */
@SuppressWarnings("unchecked")
public List<Document> findAll(int offset, int limit) { public List<Document> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.deleteDate is null"); TypedQuery<Document> q = em.createQuery("select d from Document d where d.deleteDate is null", Document.class);
q.setFirstResult(offset); q.setFirstResult(offset);
q.setMaxResults(limit); q.setMaxResults(limit);
return q.getResultList(); return q.getResultList();
@@ -65,10 +65,9 @@ public class DocumentDao {
* @param userId User ID * @param userId User ID
* @return List of documents * @return List of documents
*/ */
@SuppressWarnings("unchecked")
public List<Document> findByUserId(String userId) { public List<Document> findByUserId(String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null"); TypedQuery<Document> q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null", Document.class);
q.setParameter("userId", userId); q.setParameter("userId", userId);
return q.getResultList(); return q.getResultList();
} }
@@ -138,16 +137,16 @@ public class DocumentDao {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document // Get the document
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null"); TypedQuery<Document> dq = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
q.setParameter("id", id); dq.setParameter("id", id);
Document documentDb = (Document) q.getSingleResult(); Document documentDb = dq.getSingleResult();
// Delete the document // Delete the document
Date dateNow = new Date(); Date dateNow = new Date();
documentDb.setDeleteDate(dateNow); documentDb.setDeleteDate(dateNow);
// Delete linked data // Delete linked data
q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null"); Query q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
q.setParameter("documentId", id); q.setParameter("documentId", id);
q.setParameter("dateNow", dateNow); q.setParameter("dateNow", dateNow);
q.executeUpdate(); q.executeUpdate();
@@ -179,10 +178,10 @@ public class DocumentDao {
*/ */
public Document getById(String id) { public Document getById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null"); TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
q.setParameter("id", id); q.setParameter("id", id);
try { try {
return (Document) q.getSingleResult(); return q.getSingleResult();
} catch (NoResultException e) { } catch (NoResultException e) {
return null; return null;
} }
@@ -199,9 +198,9 @@ public class DocumentDao {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document // Get the document
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null"); TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
q.setParameter("id", document.getId()); q.setParameter("id", document.getId());
Document documentDb = (Document) q.getSingleResult(); Document documentDb = q.getSingleResult();
// Update the document // Update the document
documentDb.setTitle(document.getTitle()); documentDb.setTitle(document.getTitle());
@@ -237,7 +236,6 @@ public class DocumentDao {
query.setParameter("fileId", document.getFileId()); query.setParameter("fileId", document.getFileId());
query.setParameter("id", document.getId()); query.setParameter("id", document.getId());
query.executeUpdate(); query.executeUpdate();
} }
/** /**

View File

@@ -7,7 +7,8 @@ import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager; import javax.persistence.EntityManager;
import javax.persistence.NoResultException; import javax.persistence.NoResultException;
import javax.persistence.Query; import javax.persistence.TypedQuery;
import java.util.Collections;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.UUID; import java.util.UUID;
@@ -47,10 +48,9 @@ public class FileDao {
* @param limit Limit * @param limit Limit
* @return List of files * @return List of files
*/ */
@SuppressWarnings("unchecked")
public List<File> findAll(int offset, int limit) { public List<File> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.deleteDate is null"); TypedQuery<File> q = em.createQuery("select f from File f where f.deleteDate is null", File.class);
q.setFirstResult(offset); q.setFirstResult(offset);
q.setMaxResults(limit); q.setMaxResults(limit);
return q.getResultList(); return q.getResultList();
@@ -62,28 +62,38 @@ public class FileDao {
* @param userId User ID * @param userId User ID
* @return List of files * @return List of files
*/ */
@SuppressWarnings("unchecked")
public List<File> findByUserId(String userId) { public List<File> findByUserId(String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null"); TypedQuery<File> q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null", File.class);
q.setParameter("userId", userId); q.setParameter("userId", userId);
return q.getResultList(); return q.getResultList();
} }
/** /**
* Returns an active file. * Returns a list of active files.
*
* @param ids Files IDs
* @return List of files
*/
public List<File> getFiles(List<String> ids) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.id in :ids and f.deleteDate is null", File.class);
q.setParameter("ids", ids);
return q.getResultList();
}
/**
* Returns an active file or null.
* *
* @param id File ID * @param id File ID
* @return Document * @return File
*/ */
public File getFile(String id) { public File getFile(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); List<File> files = getFiles(List.of(id));
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null"); if (files.isEmpty()) {
q.setParameter("id", id);
try {
return (File) q.getSingleResult();
} catch (NoResultException e) {
return null; return null;
} else {
return files.get(0);
} }
} }
@@ -92,15 +102,15 @@ public class FileDao {
* *
* @param id File ID * @param id File ID
* @param userId User ID * @param userId User ID
* @return Document * @return File
*/ */
public File getFile(String id, String userId) { public File getFile(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null"); TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null", File.class);
q.setParameter("id", id); q.setParameter("id", id);
q.setParameter("userId", userId); q.setParameter("userId", userId);
try { try {
return (File) q.getSingleResult(); return q.getSingleResult();
} catch (NoResultException e) { } catch (NoResultException e) {
return null; return null;
} }
@@ -116,9 +126,9 @@ public class FileDao {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file // Get the file
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null"); TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
q.setParameter("id", id); q.setParameter("id", id);
File fileDb = (File) q.getSingleResult(); File fileDb = q.getSingleResult();
// Delete the file // Delete the file
Date dateNow = new Date(); Date dateNow = new Date();
@@ -138,9 +148,9 @@ public class FileDao {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file // Get the file
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null"); TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
q.setParameter("id", file.getId()); q.setParameter("id", file.getId());
File fileDb = (File) q.getSingleResult(); File fileDb = q.getSingleResult();
// Update the file // Update the file
fileDb.setDocumentId(file.getDocumentId()); fileDb.setDocumentId(file.getDocumentId());
@@ -162,32 +172,43 @@ public class FileDao {
*/ */
public File getActiveById(String id) { public File getActiveById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null"); TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
q.setParameter("id", id); q.setParameter("id", id);
try { try {
return (File) q.getSingleResult(); return q.getSingleResult();
} catch (NoResultException e) { } catch (NoResultException e) {
return null; return null;
} }
} }
/** /**
* Get files by document ID or all orphan files of an user. * Get files by document ID or all orphan files of a user.
* *
* @param userId User ID * @param userId User ID
* @param documentId Document ID * @param documentId Document ID
* @return List of files * @return List of files
*/ */
@SuppressWarnings("unchecked")
public List<File> getByDocumentId(String userId, String documentId) { public List<File> getByDocumentId(String userId, String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
if (documentId == null) { if (documentId == null) {
Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc"); TypedQuery<File> q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc", File.class);
q.setParameter("userId", userId); q.setParameter("userId", userId);
return q.getResultList(); return q.getResultList();
} else {
return getByDocumentsIds(Collections.singleton(documentId));
} }
Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc"); }
q.setParameter("documentId", documentId);
/**
* Get files by documents IDs.
*
* @param documentIds Documents IDs
* @return List of files
*/
public List<File> getByDocumentsIds(Iterable<String> documentIds) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("documentIds", documentIds);
return q.getResultList(); return q.getResultList();
} }
@@ -197,10 +218,9 @@ public class FileDao {
* @param versionId Version ID * @param versionId Version ID
* @return List of files * @return List of files
*/ */
@SuppressWarnings("unchecked")
public List<File> getByVersionId(String versionId) { public List<File> getByVersionId(String versionId) {
EntityManager em = ThreadLocalContext.get().getEntityManager(); EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc"); TypedQuery<File> q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("versionId", versionId); q.setParameter("versionId", versionId);
return q.getResultList(); return q.getResultList();
} }

View File

@@ -184,10 +184,8 @@ public class GroupDao {
criteriaList.add("g.GRP_DELETEDATE_D is null"); criteriaList.add("g.GRP_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -123,10 +123,8 @@ public class MetadataDao {
criteriaList.add("m.MET_DELETEDATE_D is null"); criteriaList.add("m.MET_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -64,10 +64,8 @@ public class RouteDao {
} }
criteriaList.add("r.RTE_DELETEDATE_D is null"); criteriaList.add("r.RTE_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -145,10 +145,8 @@ public class RouteModelDao {
criteriaList.add("rm.RTM_DELETEDATE_D is null"); criteriaList.add("rm.RTM_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -90,10 +90,8 @@ public class RouteStepDao {
} }
criteriaList.add("rs.RTP_DELETEDATE_D is null"); criteriaList.add("rs.RTP_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -19,7 +19,6 @@ public class ShareDao {
* *
* @param share Share * @param share Share
* @return New ID * @return New ID
* @throws Exception
*/ */
public String create(Share share) { public String create(Share share) {
// Create the UUID // Create the UUID

View File

@@ -199,10 +199,8 @@ public class TagDao {
criteriaList.add("t.TAG_DELETEDATE_D is null"); criteriaList.add("t.TAG_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -1,6 +1,7 @@
package com.sismics.docs.core.dao; package com.sismics.docs.core.dao;
import com.google.common.base.Joiner; import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import at.favre.lib.crypto.bcrypt.BCrypt; import at.favre.lib.crypto.bcrypt.BCrypt;
import org.joda.time.DateTime; import org.joda.time.DateTime;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -289,7 +290,7 @@ public class UserDao {
private String hashPassword(String password) { private String hashPassword(String password) {
int bcryptWork = Constants.DEFAULT_BCRYPT_WORK; int bcryptWork = Constants.DEFAULT_BCRYPT_WORK;
String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV); String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV);
if (envBcryptWork != null) { if (!Strings.isNullOrEmpty(envBcryptWork)) {
try { try {
int envBcryptWorkInt = Integer.parseInt(envBcryptWork); int envBcryptWorkInt = Integer.parseInt(envBcryptWork);
if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) { if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) {

View File

@@ -20,7 +20,6 @@ public class VocabularyDao {
* *
* @param vocabulary Vocabulary * @param vocabulary Vocabulary
* @return New ID * @return New ID
* @throws Exception
*/ */
public String create(Vocabulary vocabulary) { public String create(Vocabulary vocabulary) {
// Create the UUID // Create the UUID

View File

@@ -42,10 +42,8 @@ public class WebhookDao {
} }
criteriaList.add("w.WHK_DELETEDATE_D is null"); criteriaList.add("w.WHK_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) { sb.append(" where ");
sb.append(" where "); sb.append(Joiner.on(" and ").join(criteriaList));
sb.append(Joiner.on(" and ").join(criteriaList));
}
// Perform the search // Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria); QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

View File

@@ -1,5 +1,6 @@
package com.sismics.docs.core.dao.criteria; package com.sismics.docs.core.dao.criteria;
import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
@@ -49,13 +50,13 @@ public class DocumentCriteria {
* Tag IDs. * Tag IDs.
* The first level list will be AND'ed and the second level list will be OR'ed. * The first level list will be AND'ed and the second level list will be OR'ed.
*/ */
private List<List<String>> tagIdList; private List<List<String>> tagIdList = new ArrayList<>();
/** /**
* Tag IDs to excluded. * Tag IDs to exclude.
* The first and second level list will be excluded. * The first and second level list will be excluded.
*/ */
private List<List<String>> excludedTagIdList; private List<List<String>> excludedTagIdList = new ArrayList<>();
/** /**
* Shared status. * Shared status.
@@ -82,6 +83,11 @@ public class DocumentCriteria {
*/ */
private String mimeType; private String mimeType;
/**
* The title.
*/
private String title;
public List<String> getTargetIdList() { public List<String> getTargetIdList() {
return targetIdList; return targetIdList;
} }
@@ -126,19 +132,10 @@ public class DocumentCriteria {
return tagIdList; return tagIdList;
} }
public void setTagIdList(List<List<String>> tagIdList) {
this.tagIdList = tagIdList;
}
public List<List<String>> getExcludedTagIdList() { public List<List<String>> getExcludedTagIdList() {
return excludedTagIdList; return excludedTagIdList;
} }
public DocumentCriteria setExcludedTagIdList(List<List<String>> excludedTagIdList) {
this.excludedTagIdList = excludedTagIdList;
return this;
}
public Boolean getShared() { public Boolean getShared() {
return shared; return shared;
} }
@@ -163,10 +160,6 @@ public class DocumentCriteria {
this.creatorId = creatorId; this.creatorId = creatorId;
} }
public Boolean getActiveRoute() {
return activeRoute;
}
public Date getUpdateDateMin() { public Date getUpdateDateMin() {
return updateDateMin; return updateDateMin;
} }
@@ -183,6 +176,10 @@ public class DocumentCriteria {
this.updateDateMax = updateDateMax; this.updateDateMax = updateDateMax;
} }
public Boolean getActiveRoute() {
return activeRoute;
}
public void setActiveRoute(Boolean activeRoute) { public void setActiveRoute(Boolean activeRoute) {
this.activeRoute = activeRoute; this.activeRoute = activeRoute;
} }
@@ -194,4 +191,12 @@ public class DocumentCriteria {
public void setMimeType(String mimeType) { public void setMimeType(String mimeType) {
this.mimeType = mimeType; this.mimeType = mimeType;
} }
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
} }

View File

@@ -86,7 +86,7 @@ public class WebhookAsyncListener {
} }
}); });
RequestBody body = RequestBody.create(JSON, "{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}"); RequestBody body = RequestBody.create("{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}", JSON);
for (String webhookUrl : webhookUrlList) { for (String webhookUrl : webhookUrlList) {
Request request = new Request.Builder() Request request = new Request.Builder()

View File

@@ -1,5 +1,6 @@
package com.sismics.docs.core.model.context; package com.sismics.docs.core.model.context;
import com.google.common.base.Strings;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.eventbus.AsyncEventBus; import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus; import com.google.common.eventbus.EventBus;
@@ -80,7 +81,7 @@ public class AppContext {
List<Class<? extends IndexingHandler>> indexingHandlerList = Lists.newArrayList( List<Class<? extends IndexingHandler>> indexingHandlerList = Lists.newArrayList(
new ClasspathScanner<IndexingHandler>().findClasses(IndexingHandler.class, "com.sismics.docs.core.util.indexing")); new ClasspathScanner<IndexingHandler>().findClasses(IndexingHandler.class, "com.sismics.docs.core.util.indexing"));
for (Class<? extends IndexingHandler> handlerClass : indexingHandlerList) { for (Class<? extends IndexingHandler> handlerClass : indexingHandlerList) {
IndexingHandler handler = handlerClass.newInstance(); IndexingHandler handler = handlerClass.getDeclaredConstructor().newInstance();
if (handler.accept()) { if (handler.accept()) {
indexingHandler = handler; indexingHandler = handler;
break; break;
@@ -106,7 +107,7 @@ public class AppContext {
// Change the admin password if needed // Change the admin password if needed
String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV); String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV);
if (envAdminPassword != null) { if (!Strings.isNullOrEmpty(envAdminPassword)) {
UserDao userDao = new UserDao(); UserDao userDao = new UserDao();
User adminUser = userDao.getById("admin"); User adminUser = userDao.getById("admin");
if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) { if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) {
@@ -117,7 +118,7 @@ public class AppContext {
// Change the admin email if needed // Change the admin email if needed
String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV); String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV);
if (envAdminEmail != null) { if (!Strings.isNullOrEmpty(envAdminEmail)) {
UserDao userDao = new UserDao(); UserDao userDao = new UserDao();
User adminUser = userDao.getById("admin"); User adminUser = userDao.getById("admin");
if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) { if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) {

View File

@@ -69,13 +69,18 @@ public class FileService extends AbstractScheduledService {
return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS); return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS);
} }
public Path createTemporaryFile() throws IOException {
return createTemporaryFile(null);
}
/** /**
* Create a temporary file. * Create a temporary file.
* *
* @param name Wanted file name
* @return New temporary file * @return New temporary file
*/ */
public Path createTemporaryFile() throws IOException { public Path createTemporaryFile(String name) throws IOException {
Path path = Files.createTempFile("sismics_docs", null); Path path = Files.createTempFile("sismics_docs", name);
referenceSet.add(new TemporaryPathReference(path, referenceQueue)); referenceSet.add(new TemporaryPathReference(path, referenceQueue));
return path; return path;
} }

View File

@@ -85,7 +85,7 @@ public class InboxService extends AbstractScheduledService {
lastSyncDate = new Date(); lastSyncDate = new Date();
lastSyncMessageCount = 0; lastSyncMessageCount = 0;
try { try {
HashMap<String, String> tagsNameToId = getAllTags(); Map<String, String> tagsNameToId = getAllTags();
inbox = openInbox(); inbox = openInbox();
Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false)); Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
@@ -192,7 +192,7 @@ public class InboxService extends AbstractScheduledService {
* @param message Message * @param message Message
* @throws Exception e * @throws Exception e
*/ */
private void importMessage(Message message, HashMap<String, String> tags) throws Exception { private void importMessage(Message message, Map<String, String> tags) throws Exception {
log.info("Importing message: " + message.getSubject()); log.info("Importing message: " + message.getSubject());
// Parse the mail // Parse the mail
@@ -273,16 +273,16 @@ public class InboxService extends AbstractScheduledService {
/** /**
* Fetches a HashMap with all tag names as keys and their respective ids as values. * Fetches a HashMap with all tag names as keys and their respective ids as values.
* *
* @return HashMap with all tags or null if not enabled * @return Map with all tags or null if not enabled
*/ */
private HashMap<String, String> getAllTags() { private Map<String, String> getAllTags() {
if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) { if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
return null; return null;
} }
TagDao tagDao = new TagDao(); TagDao tagDao = new TagDao();
List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true)); List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));
HashMap<String, String> tagsNameToId = new HashMap<>(); Map<String, String> tagsNameToId = new HashMap<>();
for (TagDto tagDto : tags) { for (TagDto tagDto : tags) {
tagsNameToId.put(tagDto.getName(), tagDto.getId()); tagsNameToId.put(tagDto.getName(), tagDto.getId());
} }

View File

@@ -1,6 +1,5 @@
package com.sismics.docs.core.util; package com.sismics.docs.core.util;
import com.google.common.base.Charsets;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.io.CharStreams; import com.google.common.io.CharStreams;
@@ -28,6 +27,7 @@ import java.awt.image.BufferedImage;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.InputStreamReader; import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.*; import java.util.*;
@@ -76,7 +76,7 @@ public class FileUtil {
// Consume the data as text // Consume the data as text
try (InputStream is = process.getInputStream()) { try (InputStream is = process.getInputStream()) {
return CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8)); return CharStreams.toString(new InputStreamReader(is, StandardCharsets.UTF_8));
} }
} }

View File

@@ -1,8 +1,8 @@
package com.sismics.docs.core.util; package com.sismics.docs.core.util;
import com.google.common.collect.Lists;
import com.sismics.docs.core.dao.dto.TagDto; import com.sismics.docs.core.dao.dto.TagDto;
import java.util.ArrayList;
import java.util.List; import java.util.List;
/** /**
@@ -12,14 +12,14 @@ import java.util.List;
*/ */
public class TagUtil { public class TagUtil {
/** /**
* Recursively find children of a tags. * Recursively find children of a tag.
* *
* @param parentTagDto Parent tag * @param parentTagDto Parent tag
* @param allTagDtoList List of all tags * @param allTagDtoList List of all tags
* @return Children tags * @return Children tags
*/ */
public static List<TagDto> findChildren(TagDto parentTagDto, List<TagDto> allTagDtoList) { public static List<TagDto> findChildren(TagDto parentTagDto, List<TagDto> allTagDtoList) {
List<TagDto> childrenTagDtoList = Lists.newArrayList(); List<TagDto> childrenTagDtoList = new ArrayList<>();
for (TagDto tagDto : allTagDtoList) { for (TagDto tagDto : allTagDtoList) {
if (parentTagDto.getId().equals(tagDto.getParentId())) { if (parentTagDto.getId().equals(tagDto.getParentId())) {
@@ -32,15 +32,15 @@ public class TagUtil {
} }
/** /**
* Find tags by name (start with). * Find tags by name (start with, ignore case).
* *
* @param name Name * @param name Name
* @param allTagDtoList List of all tags * @param allTagDtoList List of all tags
* @return List of filtered tags * @return List of filtered tags
*/ */
public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) { public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) {
List<TagDto> tagDtoList = Lists.newArrayList(); List<TagDto> tagDtoList = new ArrayList<>();
if (name == null || name.isEmpty()) { if (name.isEmpty()) {
return tagDtoList; return tagDtoList;
} }
name = name.toLowerCase(); name = name.toLowerCase();

View File

@@ -20,7 +20,7 @@ public class AuthenticationUtil {
.map(clazz -> { .map(clazz -> {
try { try {
return clazz.newInstance(); return clazz.getDeclaredConstructor().newInstance();
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }

View File

@@ -8,7 +8,6 @@ import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.User; import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.ConfigUtil; import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.util.ClasspathScanner; import com.sismics.util.ClasspathScanner;
import org.apache.commons.pool.impl.GenericObjectPool;
import org.apache.directory.api.ldap.model.cursor.EntryCursor; import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.Attribute; import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry; import org.apache.directory.api.ldap.model.entry.Entry;
@@ -71,10 +70,7 @@ public class LdapAuthenticationHandler implements AuthenticationHandler {
config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD)); config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));
DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config); DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config);
GenericObjectPool.Config poolConfig = new GenericObjectPool.Config(); pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), null);
poolConfig.whenExhaustedAction = GenericObjectPool.WHEN_EXHAUSTED_GROW;
poolConfig.maxWait = 500;
pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), poolConfig);
} }
@Override @Override
@@ -114,7 +110,7 @@ public class LdapAuthenticationHandler implements AuthenticationHandler {
if (mailAttribute == null || mailAttribute.get() == null) { if (mailAttribute == null || mailAttribute.get() == null) {
user.setEmail(ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL)); user.setEmail(ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL));
} else { } else {
Value<?> value = mailAttribute.get(); Value value = mailAttribute.get();
user.setEmail(value.getString()); user.setEmail(value.getString());
} }
user.setStorageQuota(ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE)); user.setStorageQuota(ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE));

View File

@@ -3,6 +3,7 @@ package com.sismics.docs.core.util.format;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.sismics.util.ClasspathScanner; import com.sismics.util.ClasspathScanner;
import java.lang.reflect.InvocationTargetException;
import java.util.List; import java.util.List;
/** /**
@@ -26,12 +27,12 @@ public class FormatHandlerUtil {
public static FormatHandler find(String mimeType) { public static FormatHandler find(String mimeType) {
try { try {
for (Class<? extends FormatHandler> formatHandlerClass : FORMAT_HANDLERS) { for (Class<? extends FormatHandler> formatHandlerClass : FORMAT_HANDLERS) {
FormatHandler formatHandler = formatHandlerClass.newInstance(); FormatHandler formatHandler = formatHandlerClass.getDeclaredConstructor().newInstance();
if (formatHandler.accept(mimeType)) { if (formatHandler.accept(mimeType)) {
return formatHandler; return formatHandler;
} }
} }
} catch (InstantiationException | IllegalAccessException e) { } catch (Exception e) {
return null; return null;
} }

View File

@@ -9,7 +9,7 @@ import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.common.PDRectangle; import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.graphics.image.LosslessFactory; import org.apache.pdfbox.pdmodel.graphics.image.LosslessFactory;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject; import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor; import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.xslf.usermodel.XMLSlideShow; import org.apache.poi.xslf.usermodel.XMLSlideShow;
import org.apache.poi.xslf.usermodel.XSLFSlide; import org.apache.poi.xslf.usermodel.XSLFSlide;
@@ -50,7 +50,7 @@ public class PptxFormatHandler implements FormatHandler {
@Override @Override
public String extractContent(String language, Path file) throws Exception { public String extractContent(String language, Path file) throws Exception {
XMLSlideShow pptx = loadPPtxFile(file); XMLSlideShow pptx = loadPPtxFile(file);
return new XSLFPowerPointExtractor(pptx).getText(); return new SlideShowExtractor<>(pptx).getText();
} }
@Override @Override

View File

@@ -11,6 +11,7 @@ import org.apache.pdfbox.pdmodel.PDDocument;
import java.awt.image.BufferedImage; import java.awt.image.BufferedImage;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
@@ -33,7 +34,7 @@ public class TextPlainFormatHandler implements FormatHandler {
PdfWriter.getInstance(output, pdfOutputStream); PdfWriter.getInstance(output, pdfOutputStream);
output.open(); output.open();
String content = new String(Files.readAllBytes(file), Charsets.UTF_8); String content = Files.readString(file, StandardCharsets.UTF_8);
Font font = FontFactory.getFont("LiberationMono-Regular"); Font font = FontFactory.getFont("LiberationMono-Regular");
Paragraph paragraph = new Paragraph(content, font); Paragraph paragraph = new Paragraph(content, font);
paragraph.setAlignment(Element.ALIGN_LEFT); paragraph.setAlignment(Element.ALIGN_LEFT);
@@ -46,7 +47,7 @@ public class TextPlainFormatHandler implements FormatHandler {
@Override @Override
public String extractContent(String language, Path file) throws Exception { public String extractContent(String language, Path file) throws Exception {
return new String(Files.readAllBytes(file), "UTF-8"); return Files.readString(file, StandardCharsets.UTF_8);
} }
@Override @Override

View File

@@ -1,6 +1,5 @@
package com.sismics.docs.core.util.format; package com.sismics.docs.core.util.format;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams; import com.google.common.io.ByteStreams;
import com.google.common.io.Closer; import com.google.common.io.Closer;
@@ -13,6 +12,7 @@ import javax.imageio.ImageIO;
import java.awt.image.BufferedImage; import java.awt.image.BufferedImage;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@@ -65,7 +65,7 @@ public class VideoFormatHandler implements FormatHandler {
// Consume the data as a string // Consume the data as a string
try (InputStream is = process.getInputStream()) { try (InputStream is = process.getInputStream()) {
return new String(ByteStreams.toByteArray(is), Charsets.UTF_8); return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
} catch (Exception e) { } catch (Exception e) {
return null; return null;
} }

View File

@@ -37,9 +37,9 @@ import org.apache.lucene.search.spell.LuceneDictionary;
import org.apache.lucene.search.suggest.Lookup; import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.FuzzySuggester; import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory; import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -116,7 +116,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
} else if (luceneStorage.equals("FILE")) { } else if (luceneStorage.equals("FILE")) {
Path luceneDirectory = DirectoryUtil.getLuceneDirectory(); Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
log.info("Using file Lucene storage: {}", luceneDirectory); log.info("Using file Lucene storage: {}", luceneDirectory);
directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE); directory = new NIOFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
} }
// Create an index writer // Create an index writer
@@ -295,7 +295,11 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax"); criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
parameterMap.put("updateDateMax", criteria.getUpdateDateMax()); parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
} }
if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) { if (criteria.getTitle() != null) {
criteriaList.add("d.DOC_TITLE_C = :title");
parameterMap.put("title", criteria.getTitle());
}
if (!criteria.getTagIdList().isEmpty()) {
int index = 0; int index = 0;
for (List<String> tagIdList : criteria.getTagIdList()) { for (List<String> tagIdList : criteria.getTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList(); List<String> tagCriteriaList = Lists.newArrayList();

View File

@@ -29,6 +29,7 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.io.StringReader; import java.io.StringReader;
import java.io.StringWriter; import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.nio.file.StandardCopyOption; import java.nio.file.StandardCopyOption;
@@ -87,12 +88,12 @@ public class EmailUtil {
try { try {
// Build email headers // Build email headers
HtmlEmail email = new HtmlEmail(); HtmlEmail email = new HtmlEmail();
email.setCharset("UTF-8"); email.setCharset(StandardCharsets.UTF_8.name());
ConfigDao configDao = new ConfigDao(); ConfigDao configDao = new ConfigDao();
// Hostname // Hostname
String envHostname = System.getenv(Constants.SMTP_HOSTNAME_ENV); String envHostname = System.getenv(Constants.SMTP_HOSTNAME_ENV);
if (envHostname == null) { if (Strings.isNullOrEmpty(envHostname)) {
email.setHostName(ConfigUtil.getConfigStringValue(ConfigType.SMTP_HOSTNAME)); email.setHostName(ConfigUtil.getConfigStringValue(ConfigType.SMTP_HOSTNAME));
} else { } else {
email.setHostName(envHostname); email.setHostName(envHostname);
@@ -101,7 +102,7 @@ public class EmailUtil {
// Port // Port
int port = ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT); int port = ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT);
String envPort = System.getenv(Constants.SMTP_PORT_ENV); String envPort = System.getenv(Constants.SMTP_PORT_ENV);
if (envPort != null) { if (!Strings.isNullOrEmpty(envPort)) {
port = Integer.valueOf(envPort); port = Integer.valueOf(envPort);
} }
email.setSmtpPort(port); email.setSmtpPort(port);
@@ -114,7 +115,7 @@ public class EmailUtil {
// Username and password // Username and password
String envUsername = System.getenv(Constants.SMTP_USERNAME_ENV); String envUsername = System.getenv(Constants.SMTP_USERNAME_ENV);
String envPassword = System.getenv(Constants.SMTP_PASSWORD_ENV); String envPassword = System.getenv(Constants.SMTP_PASSWORD_ENV);
if (envUsername == null || envPassword == null) { if (Strings.isNullOrEmpty(envUsername) || Strings.isNullOrEmpty(envPassword)) {
Config usernameConfig = configDao.getById(ConfigType.SMTP_USERNAME); Config usernameConfig = configDao.getById(ConfigType.SMTP_USERNAME);
Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD); Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
if (usernameConfig != null && passwordConfig != null) { if (usernameConfig != null && passwordConfig != null) {

View File

@@ -1,6 +1,6 @@
package com.sismics.util; package com.sismics.util;
import org.jsoup.helper.StringUtil; import org.jsoup.internal.StringUtil;
import org.jsoup.nodes.Element; import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node; import org.jsoup.nodes.Node;
import org.jsoup.nodes.TextNode; import org.jsoup.nodes.TextNode;
@@ -28,7 +28,7 @@ public class HtmlToPlainText {
} }
// the formatting rules, implemented in a breadth-first DOM traverse // the formatting rules, implemented in a breadth-first DOM traverse
private class FormattingVisitor implements NodeVisitor { static private class FormattingVisitor implements NodeVisitor {
private static final int maxWidth = 80; private static final int maxWidth = 80;
private int width = 0; private int width = 0;
private StringBuilder accum = new StringBuilder(); // holds the accumulated text private StringBuilder accum = new StringBuilder(); // holds the accumulated text
@@ -64,7 +64,7 @@ public class HtmlToPlainText {
return; // don't accumulate long runs of empty spaces return; // don't accumulate long runs of empty spaces
if (text.length() + width > maxWidth) { // won't fit, needs to wrap if (text.length() + width > maxWidth) { // won't fit, needs to wrap
String words[] = text.split("\\s+"); String[] words = text.split("\\s+");
for (int i = 0; i < words.length; i++) { for (int i = 0; i < words.length; i++) {
String word = words[i]; String word = words[i];
boolean last = i == words.length - 1; boolean last = i == words.length - 1;

View File

@@ -1,6 +1,5 @@
package com.sismics.util; package com.sismics.util;
import com.google.common.base.Charsets;
import com.google.common.hash.Hashing; import com.google.common.hash.Hashing;
import javax.imageio.IIOImage; import javax.imageio.IIOImage;
@@ -13,6 +12,7 @@ import java.awt.image.BufferedImage;
import java.awt.image.WritableRaster; import java.awt.image.WritableRaster;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStream; import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator; import java.util.Iterator;
/** /**
@@ -80,7 +80,7 @@ public class ImageUtil {
} }
return Hashing.md5().hashString( return Hashing.md5().hashString(
email.trim().toLowerCase(), Charsets.UTF_8) email.trim().toLowerCase(), StandardCharsets.UTF_8)
.toString(); .toString();
} }

View File

@@ -8,6 +8,7 @@ import java.io.IOException;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.net.URL; import java.net.URL;
import java.net.URLDecoder; import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.util.*; import java.util.*;
import java.util.jar.JarEntry; import java.util.jar.JarEntry;
@@ -53,7 +54,7 @@ public class ResourceUtil {
// Extract the JAR path // Extract the JAR path
String jarPath = dirUrl.getPath().substring(5, dirUrl.getPath().indexOf("!")); String jarPath = dirUrl.getPath().substring(5, dirUrl.getPath().indexOf("!"));
JarFile jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8")); JarFile jar = new JarFile(URLDecoder.decode(jarPath, StandardCharsets.UTF_8));
Set<String> fileSet = new HashSet<String>(); Set<String> fileSet = new HashSet<String>();
try { try {

View File

@@ -1,8 +1,8 @@
package com.sismics.util.jpa; package com.sismics.util.jpa;
import com.google.common.base.Strings;
import com.sismics.docs.core.util.DirectoryUtil; import com.sismics.docs.core.util.DirectoryUtil;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder; import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.config.ConfigurationHelper; import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry; import org.hibernate.service.ServiceRegistry;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -34,7 +34,6 @@ public final class EMF {
try { try {
properties = getEntityManagerProperties(); properties = getEntityManagerProperties();
Environment.verifyProperties(properties);
ConfigurationHelper.resolvePlaceHolders(properties); ConfigurationHelper.resolvePlaceHolders(properties);
ServiceRegistry reg = new StandardServiceRegistryBuilder().applySettings(properties).build(); ServiceRegistry reg = new StandardServiceRegistryBuilder().applySettings(properties).build();
@@ -85,7 +84,7 @@ public final class EMF {
Map<Object, Object> props = new HashMap<>(); Map<Object, Object> props = new HashMap<>();
Path dbDirectory = DirectoryUtil.getDbDirectory(); Path dbDirectory = DirectoryUtil.getDbDirectory();
String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString(); String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
if (databaseUrl == null) { if (Strings.isNullOrEmpty(databaseUrl)) {
props.put("hibernate.connection.driver_class", "org.h2.Driver"); props.put("hibernate.connection.driver_class", "org.h2.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect"); props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000"); props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");

View File

@@ -1,15 +1,9 @@
package com.sismics.util.mime; package com.sismics.util.mime;
import com.google.common.base.Charsets;
import org.apache.commons.compress.utils.IOUtils;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.net.URLConnection;
import java.io.UnsupportedEncodingException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/** /**
* Utility to check MIME types. * Utility to check MIME types.
@@ -18,7 +12,7 @@ import java.util.zip.ZipInputStream;
*/ */
public class MimeTypeUtil { public class MimeTypeUtil {
/** /**
* Try to guess the MIME type of a file by its magic number (header). * Try to guess the MIME type of a file.
* *
* @param file File to inspect * @param file File to inspect
* @param name File name * @param name File name
@@ -26,57 +20,17 @@ public class MimeTypeUtil {
* @throws IOException e * @throws IOException e
*/ */
public static String guessMimeType(Path file, String name) throws IOException { public static String guessMimeType(Path file, String name) throws IOException {
String mimeType; String mimeType = Files.probeContentType(file);
try (InputStream is = Files.newInputStream(file)) {
byte[] headerBytes = new byte[64]; if (mimeType == null && name != null) {
is.read(headerBytes); mimeType = URLConnection.getFileNameMap().getContentTypeFor(name);
mimeType = guessMimeType(headerBytes, name);
} }
return guessOpenDocumentFormat(mimeType, file); if (mimeType == null) {
} return MimeType.DEFAULT;
/**
* Try to guess the MIME type of a file by its magic number (header).
*
* @param headerBytes File header (first bytes)
* @param name File name
* @return MIME type
* @throws UnsupportedEncodingException e
*/
public static String guessMimeType(byte[] headerBytes, String name) throws UnsupportedEncodingException {
String header = new String(headerBytes, "US-ASCII");
// Detect by header bytes
if (header.startsWith("PK")) {
return MimeType.APPLICATION_ZIP;
} else if (header.startsWith("GIF87a") || header.startsWith("GIF89a")) {
return MimeType.IMAGE_GIF;
} else if (headerBytes[0] == ((byte) 0xff) && headerBytes[1] == ((byte) 0xd8)) {
return MimeType.IMAGE_JPEG;
} else if (headerBytes[0] == ((byte) 0x89) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x4e) && headerBytes[3] == ((byte) 0x47) &&
headerBytes[4] == ((byte) 0x0d) && headerBytes[5] == ((byte) 0x0a) && headerBytes[6] == ((byte) 0x1a) && headerBytes[7] == ((byte) 0x0a)) {
return MimeType.IMAGE_PNG;
} else if (headerBytes[0] == ((byte) 0x25) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x44) && headerBytes[3] == ((byte) 0x46)) {
return MimeType.APPLICATION_PDF;
} else if (headerBytes[0] == ((byte) 0x00) && headerBytes[1] == ((byte) 0x00) && headerBytes[2] == ((byte) 0x00)
&& (headerBytes[3] == ((byte) 0x14) || headerBytes[3] == ((byte) 0x18) || headerBytes[3] == ((byte) 0x20))
&& headerBytes[4] == ((byte) 0x66) && headerBytes[5] == ((byte) 0x74) && headerBytes[6] == ((byte) 0x79) && headerBytes[7] == ((byte) 0x70)) {
return MimeType.VIDEO_MP4;
} else if (headerBytes[0] == ((byte) 0x1a) && headerBytes[1] == ((byte) 0x45) && headerBytes[2] == ((byte) 0xdf) && headerBytes[3] == ((byte) 0xa3)) {
return MimeType.VIDEO_WEBM;
} }
// Detect by file extension return mimeType;
if (name != null) {
if (name.endsWith(".txt")) {
return MimeType.TEXT_PLAIN;
} else if (name.endsWith(".csv")) {
return MimeType.TEXT_CSV;
}
}
return MimeType.DEFAULT;
} }
/** /**
@@ -113,52 +67,4 @@ public class MimeTypeUtil {
return "bin"; return "bin";
} }
} }
/**
* Guess the MIME type of open document formats (docx and odt).
* It's more costly than the simple header check, but needed because open document formats
* are simple ZIP files on the outside and much bigger on the inside.
*
* @param mimeType Currently detected MIME type
* @param file File on disk
* @return MIME type
*/
private static String guessOpenDocumentFormat(String mimeType, Path file) {
if (!MimeType.APPLICATION_ZIP.equals(mimeType)) {
// open document formats are ZIP files
return mimeType;
}
try (InputStream inputStream = Files.newInputStream(file);
ZipInputStream zipInputStream = new ZipInputStream(inputStream, Charsets.ISO_8859_1)) {
ZipEntry archiveEntry = zipInputStream.getNextEntry();
while (archiveEntry != null) {
if (archiveEntry.getName().equals("mimetype")) {
// Maybe it's an ODT file
String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
if (MimeType.OPEN_DOCUMENT_TEXT.equals(content.trim())) {
mimeType = MimeType.OPEN_DOCUMENT_TEXT;
break;
}
} else if (archiveEntry.getName().equals("[Content_Types].xml")) {
// Maybe it's a DOCX file
String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
if (content.contains(MimeType.OFFICE_DOCUMENT)) {
mimeType = MimeType.OFFICE_DOCUMENT;
break;
} else if (content.contains(MimeType.OFFICE_PRESENTATION)) {
mimeType = MimeType.OFFICE_PRESENTATION;
break;
}
}
archiveEntry = zipInputStream.getNextEntry();
}
} catch (Exception e) {
// In case of any error, just give up and keep the ZIP MIME type
return mimeType;
}
return mimeType;
}
} }

View File

@@ -137,7 +137,6 @@ public class TestFileUtil {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
PdfUtil.convertToPdf(documentDto, Lists.newArrayList(file0, file1, file2, file3, file4, file5), true, true, 10, outputStream); PdfUtil.convertToPdf(documentDto, Lists.newArrayList(file0, file1, file2, file3, file4, file5), true, true, 10, outputStream);
Assert.assertTrue(outputStream.toByteArray().length > 0); Assert.assertTrue(outputStream.toByteArray().length > 0);
com.google.common.io.Files.write(outputStream.toByteArray(), new java.io.File("C:\\Users\\Jendib\\Downloads\\test.pdf"));
} }
} }
} }

View File

@@ -15,7 +15,7 @@ import java.nio.file.Paths;
*/ */
public class TestMimeTypeUtil { public class TestMimeTypeUtil {
@Test @Test
public void guessOpenDocumentFormatTest() throws Exception { public void test() throws Exception {
// Detect ODT files // Detect ODT files
Path path = Paths.get(ClassLoader.getSystemResource("file/document.odt").toURI()); Path path = Paths.get(ClassLoader.getSystemResource("file/document.odt").toURI());
Assert.assertEquals(MimeType.OPEN_DOCUMENT_TEXT, MimeTypeUtil.guessMimeType(path, "document.odt")); Assert.assertEquals(MimeType.OPEN_DOCUMENT_TEXT, MimeTypeUtil.guessMimeType(path, "document.odt"));
@@ -27,5 +27,45 @@ public class TestMimeTypeUtil {
// Detect PPTX files // Detect PPTX files
path = Paths.get(ClassLoader.getSystemResource("file/apache.pptx").toURI()); path = Paths.get(ClassLoader.getSystemResource("file/apache.pptx").toURI());
Assert.assertEquals(MimeType.OFFICE_PRESENTATION, MimeTypeUtil.guessMimeType(path, "apache.pptx")); Assert.assertEquals(MimeType.OFFICE_PRESENTATION, MimeTypeUtil.guessMimeType(path, "apache.pptx"));
// Detect XLSX files
path = Paths.get(ClassLoader.getSystemResource("file/document.xlsx").toURI());
Assert.assertEquals(MimeType.OFFICE_SHEET, MimeTypeUtil.guessMimeType(path, "document.xlsx"));
// Detect TXT files
path = Paths.get(ClassLoader.getSystemResource("file/document.txt").toURI());
Assert.assertEquals(MimeType.TEXT_PLAIN, MimeTypeUtil.guessMimeType(path, "document.txt"));
// Detect CSV files
path = Paths.get(ClassLoader.getSystemResource("file/document.csv").toURI());
Assert.assertEquals(MimeType.TEXT_CSV, MimeTypeUtil.guessMimeType(path, "document.csv"));
// Detect PDF files
path = Paths.get(ClassLoader.getSystemResource("file/udhr.pdf").toURI());
Assert.assertEquals(MimeType.APPLICATION_PDF, MimeTypeUtil.guessMimeType(path, "udhr.pdf"));
// Detect JPEG files
path = Paths.get(ClassLoader.getSystemResource("file/apollo_portrait.jpg").toURI());
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(path, "apollo_portrait.jpg"));
// Detect GIF files
path = Paths.get(ClassLoader.getSystemResource("file/image.gif").toURI());
Assert.assertEquals(MimeType.IMAGE_GIF, MimeTypeUtil.guessMimeType(path, "image.gif"));
// Detect PNG files
path = Paths.get(ClassLoader.getSystemResource("file/image.png").toURI());
Assert.assertEquals(MimeType.IMAGE_PNG, MimeTypeUtil.guessMimeType(path, "image.png"));
// Detect ZIP files
path = Paths.get(ClassLoader.getSystemResource("file/document.zip").toURI());
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(path, "document.zip"));
// Detect WEBM files
path = Paths.get(ClassLoader.getSystemResource("file/video.webm").toURI());
Assert.assertEquals(MimeType.VIDEO_WEBM, MimeTypeUtil.guessMimeType(path, "video.webm"));
// Detect MP4 files
path = Paths.get(ClassLoader.getSystemResource("file/video.mp4").toURI());
Assert.assertEquals(MimeType.VIDEO_MP4, MimeTypeUtil.guessMimeType(path, "video.mp4"));
} }
} }

View File

@@ -0,0 +1,2 @@
col1,col2
test,me
1 col1 col2
2 test me

View File

@@ -0,0 +1 @@
test me.

Binary file not shown.

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.6 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.4 KiB

Binary file not shown.

Binary file not shown.

View File

@@ -1,81 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.9</version>
<relativePath>..</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>docs-stress</artifactId>
<packaging>jar</packaging>
<name>Docs Stress</name>
<dependencies>
<!-- Dependencies to Jersey -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
</dependency>
<!-- Depenedencies to Docs -->
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
</dependency>
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
<type>test-jar</type>
</dependency>
<!-- Other external dependencies -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
</resources>
</build>
</project>

View File

@@ -1,135 +0,0 @@
package com.sismics.docs.stress;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Resources;
import com.sismics.docs.rest.util.ClientUtil;
import com.sismics.util.filter.TokenBasedSecurityFilter;
import org.glassfish.jersey.client.ClientResponse;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.json.JsonObject;
import javax.ws.rs.client.*;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import java.io.InputStream;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
/**
* Stress app for Teedy.
*
* @author bgamard
*/
public class Main {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(Main.class);
private static final String API_URL = "http://localhost:9999/docs-web/api/";
private static final int USER_COUNT = 50;
private static final int DOCUMENT_PER_USER_COUNT = 2000;
private static final int TAG_PER_USER_COUNT = 20;
private static final int FILE_PER_DOCUMENT_COUNT = 10;
private static Client client = ClientBuilder.newClient();
private static Set<User> userSet = Sets.newHashSet();
/**
* Entry point.
*
* @param args Args
* @throws Exception
*/
public static void main(String[] args) throws Exception {
log.info("Starting stress test...");
WebTarget resource = client.target(API_URL);
ClientUtil clientUtil = new ClientUtil(resource);
// Create users
for (int i = 0; i < USER_COUNT; i++) {
String username = generateString();
clientUtil.createUser(username);
userSet.add(new User(username, (clientUtil.login(username))));
log.info("Created user " + (i + 1) + "/" + USER_COUNT);
}
// Create tags for each user
int tagCreatedCount = 1;
for (User user : userSet) {
Invocation.Builder tagResource = resource.path("/tag").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, user.authToken);
for (int j = 0; j < TAG_PER_USER_COUNT; j++) {
Form form = new Form();
String name = generateString();
form.param("name", name);
form.param("color", "#ff0000");
JsonObject json = tagResource.put(Entity.form(form), JsonObject.class);
user.tagList.add(json.getString("id"));
log.info("Created tag " + (tagCreatedCount++) + "/" + TAG_PER_USER_COUNT * USER_COUNT);
}
}
// Create documents for each user
int documentCreatedCount = 1;
for (User user : userSet) {
for (int i = 0; i < DOCUMENT_PER_USER_COUNT; i++) {
long createDate = new Date().getTime();
Form form = new Form()
.param("title", generateString())
.param("description", generateString())
.param("tags", user.tagList.get(ThreadLocalRandom.current().nextInt(user.tagList.size()))) // Random tag
.param("language", "eng")
.param("create_date", Long.toString(createDate));
JsonObject json = resource.path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, user.authToken)
.put(Entity.form(form), JsonObject.class);
String documentId = json.getString("id");
log.info("Created document " + (documentCreatedCount++) + "/" + DOCUMENT_PER_USER_COUNT * USER_COUNT + " for user: " + user.username);
// Add files for each document
for (int j = 0; j < FILE_PER_DOCUMENT_COUNT; j++) {
try (InputStream is = Resources.getResource("empty.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "empty.png");
@SuppressWarnings("resource")
ClientResponse response = resource
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, user.authToken)
.put(Entity.entity(new FormDataMultiPart().field("id", documentId).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), ClientResponse.class);
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
}
}
}
}
}
private static String generateString() {
return UUID.randomUUID().toString().replace("-", "");
}
private static class User {
String username;
List<String> tagList = Lists.newArrayList();
String authToken;
User(String username, String authToken) {
this.username = username;
this.authToken = authToken;
}
}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 921 B

View File

@@ -1,6 +0,0 @@
log4j.rootCategory=WARN, CONSOLE
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%d{DATE} %p %l %m %n
log4j.logger.com.sismics=DEBUG

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.sismics.docs</groupId> <groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId> <artifactId>docs-parent</artifactId>
<version>1.9</version> <version>1.11</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>

View File

@@ -0,0 +1,40 @@
package com.sismics.rest.util;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.rest.exception.ServerException;
import com.sismics.util.JsonUtil;
import javax.json.Json;
import javax.json.JsonObjectBuilder;
import java.io.IOException;
import java.nio.file.Files;
/**
* Rest utilities.
*
* @author bgamard
*/
public class RestUtil {
/**
* Transform a File into its JSON representation
* @param fileDb a file
* @return the JSON
*/
public static JsonObjectBuilder fileToJsonObjectBuilder(File fileDb) {
try {
return Json.createObjectBuilder()
.add("id", fileDb.getId())
.add("processing", FileUtil.isProcessingFile(fileDb.getId()))
.add("name", JsonUtil.nullable(fileDb.getName()))
.add("version", fileDb.getVersion())
.add("mimetype", fileDb.getMimeType())
.add("document_id", JsonUtil.nullable(fileDb.getDocumentId()))
.add("create_date", fileDb.getCreateDate().getTime())
.add("size", Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId())));
} catch (IOException e) {
throw new ServerException("FileError", "Unable to get the size of " + fileDb.getId(), e);
}
}
}

View File

@@ -21,6 +21,8 @@ public class ValidationUtil {
private static Pattern ALPHANUMERIC_PATTERN = Pattern.compile("[a-zA-Z0-9_]+"); private static Pattern ALPHANUMERIC_PATTERN = Pattern.compile("[a-zA-Z0-9_]+");
private static Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@\\.]+");
/** /**
* Checks that the argument is not null. * Checks that the argument is not null.
* *
@@ -152,6 +154,12 @@ public class ValidationUtil {
} }
} }
public static void validateUsername(String s, String name) throws ClientException {
if (!USERNAME_PATTERN.matcher(s).matches()) {
throw new ClientException("ValidationError", MessageFormat.format("{0} must have only alphanumeric, underscore characters or @ and .", name));
}
}
public static void validateRegex(String s, String name, String regex) throws ClientException { public static void validateRegex(String s, String name, String regex) throws ClientException {
if (!Pattern.compile(regex).matcher(s).matches()) { if (!Pattern.compile(regex).matcher(s).matches()) {
throw new ClientException("ValidationError", MessageFormat.format("{0} must match {1}", name, regex)); throw new ClientException("ValidationError", MessageFormat.format("{0} must match {1}", name, regex));

View File

@@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException; import java.io.IOException;
import java.net.URI; import java.net.URI;
import java.util.List; import java.util.List;
import java.util.Objects;
/** /**
* Base class of integration tests with Jersey. * Base class of integration tests with Jersey.
@@ -33,6 +34,16 @@ import java.util.List;
* @author jtremeaux * @author jtremeaux
*/ */
public abstract class BaseJerseyTest extends JerseyTest { public abstract class BaseJerseyTest extends JerseyTest {
protected static final String FILE_APACHE_PPTX = "file/apache.pptx";
protected static final String FILE_DOCUMENT_DOCX = "file/document.docx";
protected static final String FILE_DOCUMENT_ODT = "file/document.odt";
protected static final String FILE_DOCUMENT_TXT = "file/document.txt";
protected static final String FILE_EINSTEIN_ROOSEVELT_LETTER_PNG = "file/Einstein-Roosevelt-letter.png";
protected static final String FILE_PIA_00452_JPG = "file/PIA00452.jpg";
protected static final String FILE_VIDEO_WEBM = "file/video.webm";
protected static final String FILE_WIKIPEDIA_PDF = "file/wikipedia.pdf";
protected static final String FILE_WIKIPEDIA_ZIP = "file/wikipedia.zip";
/** /**
* Test HTTP server. * Test HTTP server.
*/ */
@@ -56,7 +67,7 @@ public abstract class BaseJerseyTest extends JerseyTest {
@Override @Override
protected Application configure() { protected Application configure() {
String travisEnv = System.getenv("TRAVIS"); String travisEnv = System.getenv("TRAVIS");
if (travisEnv == null || !travisEnv.equals("true")) { if (!Objects.equals(travisEnv, "true")) {
// Travis doesn't like big logs // Travis doesn't like big logs
enable(TestProperties.LOG_TRAFFIC); enable(TestProperties.LOG_TRAFFIC);
enable(TestProperties.DUMP_ENTITY); enable(TestProperties.DUMP_ENTITY);

View File

@@ -3,6 +3,7 @@ package com.sismics.docs.rest.util;
import com.google.common.io.Resources; import com.google.common.io.Resources;
import com.sismics.util.filter.TokenBasedSecurityFilter; import com.sismics.util.filter.TokenBasedSecurityFilter;
import org.glassfish.jersey.media.multipart.FormDataMultiPart; import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature; import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart; import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import org.junit.Assert; import org.junit.Assert;
@@ -16,6 +17,12 @@ import javax.ws.rs.core.NewCookie;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
/** /**
* REST client utilities. * REST client utilities.
@@ -156,27 +163,58 @@ public class ClientUtil {
return authToken; return authToken;
} }
/**
* Create a document
*
* @param token Authentication token
* @return Document ID
*/
public String createDocument(String token) {
JsonObject json = this.resource.path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
.put(Entity.form(new Form()
.param("title", "Document Title")
.param("description", "Document description")
.param("language", "eng")
.param("create_date", Long.toString(new Date().getTime()))), JsonObject.class);
String documentId = json.getString("id");
Assert.assertNotNull(documentId);
return documentId;
}
/** /**
* Add a file to a document. * Add a file to a document.
* *
* @param file File path * @param file File path
* @param filename Filename
* @param token Authentication token * @param token Authentication token
* @param documentId Document ID * @param documentId Document ID
* @return File ID * @return File ID
* @throws IOException e * @throws IOException e
* @throws URISyntaxException e
*/ */
public String addFileToDocument(String file, String filename, String token, String documentId) throws IOException { public String addFileToDocument(String file, String token, String documentId) throws IOException, URISyntaxException {
try (InputStream is = Resources.getResource(file).openStream()) { URL fileResource = Resources.getResource(file);
Path filePath = Paths.get(fileResource.toURI());
String filename = filePath.getFileName().toString();
try (InputStream is = fileResource.openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, filename); StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, filename);
try (FormDataMultiPart multiPart = new FormDataMultiPart()) { try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = resource MultiPart formContent;
if (documentId != null) {
formContent = multiPart.field("id", documentId).bodyPart(streamDataBodyPart);
} else {
formContent = multiPart.bodyPart(streamDataBodyPart);
}
JsonObject json = this.resource
.register(MultiPartFeature.class) .register(MultiPartFeature.class)
.path("/file").request() .path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
.put(Entity.entity(multiPart.field("id", documentId).bodyPart(streamDataBodyPart), .put(Entity.entity(formContent,
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class); MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
return json.getString("id"); String fileId = json.getString("id");
Assert.assertNotNull(fileId);
Assert.assertEquals(Files.size(filePath), json.getJsonNumber("size").longValue());
return fileId;
} }
} }
} }

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>com.sismics.docs</groupId> <groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId> <artifactId>docs-parent</artifactId>
<version>1.9</version> <version>1.11</version>
<relativePath>..</relativePath> <relativePath>..</relativePath>
</parent> </parent>
@@ -209,54 +209,6 @@
</build> </build>
</profile> </profile>
<!-- Stress profile -->
<profile>
<id>stress</id>
<activation>
<property>
<name>env</name>
<value>stress</value>
</property>
</activation>
<build>
<resources>
<resource>
<directory>src/stress/resources</directory>
<filtering>false</filtering>
<excludes>
<exclude>**/config.properties</exclude>
</excludes>
</resource>
<resource>
<directory>src/stress/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/config.properties</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<configuration>
<systemProperties>
<systemProperty>
<name>application.mode</name>
<value>dev</value>
</systemProperty>
</systemProperties>
<webApp>
<contextPath>/docs-web</contextPath>
</webApp>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<!-- Production profile --> <!-- Production profile -->
<profile> <profile>
<id>prod</id> <id>prod</id>

View File

@@ -205,28 +205,28 @@ public class AppResource extends BaseResource {
Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD); Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
Config fromConfig = configDao.getById(ConfigType.SMTP_FROM); Config fromConfig = configDao.getById(ConfigType.SMTP_FROM);
JsonObjectBuilder response = Json.createObjectBuilder(); JsonObjectBuilder response = Json.createObjectBuilder();
if (System.getenv(Constants.SMTP_HOSTNAME_ENV) == null) { if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_HOSTNAME_ENV))) {
if (hostnameConfig == null) { if (hostnameConfig == null) {
response.addNull("hostname"); response.addNull("hostname");
} else { } else {
response.add("hostname", hostnameConfig.getValue()); response.add("hostname", hostnameConfig.getValue());
} }
} }
if (System.getenv(Constants.SMTP_PORT_ENV) == null) { if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_PORT_ENV))) {
if (portConfig == null) { if (portConfig == null) {
response.addNull("port"); response.addNull("port");
} else { } else {
response.add("port", Integer.valueOf(portConfig.getValue())); response.add("port", Integer.valueOf(portConfig.getValue()));
} }
} }
if (System.getenv(Constants.SMTP_USERNAME_ENV) == null) { if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_USERNAME_ENV))) {
if (usernameConfig == null) { if (usernameConfig == null) {
response.addNull("username"); response.addNull("username");
} else { } else {
response.add("username", usernameConfig.getValue()); response.add("username", usernameConfig.getValue());
} }
} }
if (System.getenv(Constants.SMTP_PASSWORD_ENV) == null) { if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_PASSWORD_ENV))) {
if (passwordConfig == null) { if (passwordConfig == null) {
response.addNull("password"); response.addNull("password");
} else { } else {
@@ -396,6 +396,8 @@ public class AppResource extends BaseResource {
* @apiName PostAppConfigInbox * @apiName PostAppConfigInbox
* @apiGroup App * @apiGroup App
* @apiParam {Boolean} enabled True if the inbox scanning is enabled * @apiParam {Boolean} enabled True if the inbox scanning is enabled
* @apiParam {Boolean} autoTagsEnabled If true automatically add tags to document (prefixed by #)
* @apiParam {Boolean} deleteImported If true delete message from mailbox after import
* @apiParam {String} hostname IMAP hostname * @apiParam {String} hostname IMAP hostname
* @apiParam {Integer} port IMAP port * @apiParam {Integer} port IMAP port
* @apiParam {String} username IMAP username * @apiParam {String} username IMAP username
@@ -432,6 +434,8 @@ public class AppResource extends BaseResource {
} }
checkBaseFunction(BaseFunction.ADMIN); checkBaseFunction(BaseFunction.ADMIN);
ValidationUtil.validateRequired(enabled, "enabled"); ValidationUtil.validateRequired(enabled, "enabled");
ValidationUtil.validateRequired(autoTagsEnabled, "autoTagsEnabled");
ValidationUtil.validateRequired(deleteImported, "deleteImported");
if (!Strings.isNullOrEmpty(portStr)) { if (!Strings.isNullOrEmpty(portStr)) {
ValidationUtil.validateInteger(portStr, "port"); ValidationUtil.validateInteger(portStr, "port");
} }
@@ -508,7 +512,7 @@ public class AppResource extends BaseResource {
* @apiSuccess {String} logs.message Message * @apiSuccess {String} logs.message Message
* @apiError (client) ForbiddenError Access denied * @apiError (client) ForbiddenError Access denied
* @apiError (server) ServerError MEMORY appender not configured * @apiError (server) ServerError MEMORY appender not configured
* @apiPermission user * @apiPermission admin
* @apiVersion 1.5.0 * @apiVersion 1.5.0
* *
* @param minLevel Filter on logging level * @param minLevel Filter on logging level
@@ -529,6 +533,7 @@ public class AppResource extends BaseResource {
if (!authenticate()) { if (!authenticate()) {
throw new ForbiddenClientException(); throw new ForbiddenClientException();
} }
checkBaseFunction(BaseFunction.ADMIN);
// Get the memory appender // Get the memory appender
org.apache.log4j.Logger logger = org.apache.log4j.Logger.getRootLogger(); org.apache.log4j.Logger logger = org.apache.log4j.Logger.getRootLogger();
@@ -665,45 +670,45 @@ public class AppResource extends BaseResource {
log.info("Deleting {} orphan ACLs", q.executeUpdate()); log.info("Deleting {} orphan ACLs", q.executeUpdate());
// Soft delete orphan comments // Soft delete orphan comments
q = em.createNativeQuery("update T_COMMENT c set c.COM_DELETEDATE_D = :dateNow where c.COM_ID_C in (select c.COM_ID_C from T_COMMENT c left join T_DOCUMENT d on d.DOC_ID_C = c.COM_IDDOC_C and d.DOC_DELETEDATE_D is null where d.DOC_ID_C is null)"); q = em.createNativeQuery("update T_COMMENT set COM_DELETEDATE_D = :dateNow where COM_ID_C in (select c.COM_ID_C from T_COMMENT c left join T_DOCUMENT d on d.DOC_ID_C = c.COM_IDDOC_C and d.DOC_DELETEDATE_D is null where d.DOC_ID_C is null)");
q.setParameter("dateNow", new Date()); q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan comments", q.executeUpdate()); log.info("Deleting {} orphan comments", q.executeUpdate());
// Soft delete orphan document tag links // Soft delete orphan document tag links
q = em.createNativeQuery("update T_DOCUMENT_TAG dt set dt.DOT_DELETEDATE_D = :dateNow where dt.DOT_ID_C in (select dt.DOT_ID_C from T_DOCUMENT_TAG dt left join T_DOCUMENT d on dt.DOT_IDDOCUMENT_C = d.DOC_ID_C and d.DOC_DELETEDATE_D is null left join T_TAG t on t.TAG_ID_C = dt.DOT_IDTAG_C and t.TAG_DELETEDATE_D is null where d.DOC_ID_C is null or t.TAG_ID_C is null)"); q = em.createNativeQuery("update T_DOCUMENT_TAG set DOT_DELETEDATE_D = :dateNow where DOT_ID_C in (select dt.DOT_ID_C from T_DOCUMENT_TAG dt left join T_DOCUMENT d on dt.DOT_IDDOCUMENT_C = d.DOC_ID_C and d.DOC_DELETEDATE_D is null left join T_TAG t on t.TAG_ID_C = dt.DOT_IDTAG_C and t.TAG_DELETEDATE_D is null where d.DOC_ID_C is null or t.TAG_ID_C is null)");
q.setParameter("dateNow", new Date()); q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan document tag links", q.executeUpdate()); log.info("Deleting {} orphan document tag links", q.executeUpdate());
// Soft delete orphan shares // Soft delete orphan shares
q = em.createNativeQuery("update T_SHARE s set s.SHA_DELETEDATE_D = :dateNow where s.SHA_ID_C in (select s.SHA_ID_C from T_SHARE s left join T_ACL a on a.ACL_TARGETID_C = s.SHA_ID_C and a.ACL_DELETEDATE_D is null where a.ACL_ID_C is null)"); q = em.createNativeQuery("update T_SHARE set SHA_DELETEDATE_D = :dateNow where SHA_ID_C in (select s.SHA_ID_C from T_SHARE s left join T_ACL a on a.ACL_TARGETID_C = s.SHA_ID_C and a.ACL_DELETEDATE_D is null where a.ACL_ID_C is null)");
q.setParameter("dateNow", new Date()); q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan shares", q.executeUpdate()); log.info("Deleting {} orphan shares", q.executeUpdate());
// Soft delete orphan tags // Soft delete orphan tags
q = em.createNativeQuery("update T_TAG t set t.TAG_DELETEDATE_D = :dateNow where t.TAG_ID_C in (select t.TAG_ID_C from T_TAG t left join T_USER u on u.USE_ID_C = t.TAG_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)"); q = em.createNativeQuery("update T_TAG set TAG_DELETEDATE_D = :dateNow where TAG_ID_C in (select t.TAG_ID_C from T_TAG t left join T_USER u on u.USE_ID_C = t.TAG_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date()); q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan tags", q.executeUpdate()); log.info("Deleting {} orphan tags", q.executeUpdate());
// Soft delete orphan documents // Soft delete orphan documents
q = em.createNativeQuery("update T_DOCUMENT d set d.DOC_DELETEDATE_D = :dateNow where d.DOC_ID_C in (select d.DOC_ID_C from T_DOCUMENT d left join T_USER u on u.USE_ID_C = d.DOC_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)"); q = em.createNativeQuery("update T_DOCUMENT set DOC_DELETEDATE_D = :dateNow where DOC_ID_C in (select d.DOC_ID_C from T_DOCUMENT d left join T_USER u on u.USE_ID_C = d.DOC_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date()); q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan documents", q.executeUpdate()); log.info("Deleting {} orphan documents", q.executeUpdate());
// Soft delete orphan files // Soft delete orphan files
q = em.createNativeQuery("update T_FILE f set f.FIL_DELETEDATE_D = :dateNow where f.FIL_ID_C in (select f.FIL_ID_C from T_FILE f left join T_USER u on u.USE_ID_C = f.FIL_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)"); q = em.createNativeQuery("update T_FILE set FIL_DELETEDATE_D = :dateNow where FIL_ID_C in (select f.FIL_ID_C from T_FILE f left join T_USER u on u.USE_ID_C = f.FIL_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date()); q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan files", q.executeUpdate()); log.info("Deleting {} orphan files", q.executeUpdate());
// Hard delete softly deleted data // Hard delete softly deleted data
log.info("Deleting {} soft deleted document tag links", em.createQuery("delete DocumentTag dt where dt.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted document tag links", em.createQuery("delete DocumentTag where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted ACLs", em.createQuery("delete Acl a where a.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted ACLs", em.createQuery("delete Acl where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted shares", em.createQuery("delete Share s where s.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted shares", em.createQuery("delete Share where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted tags", em.createQuery("delete Tag t where t.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted tags", em.createQuery("delete Tag where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted comments", em.createQuery("delete Comment c where c.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted comments", em.createQuery("delete Comment where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted files", em.createQuery("delete File f where f.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted files", em.createQuery("delete File where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted documents", em.createQuery("delete Document d where d.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted documents", em.createQuery("delete Document where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted users", em.createQuery("delete User u where u.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted users", em.createQuery("delete User where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted groups", em.createQuery("delete Group g where g.deleteDate is not null").executeUpdate()); log.info("Deleting {} soft deleted groups", em.createQuery("delete Group where deleteDate is not null").executeUpdate());
// Always return OK // Always return OK
JsonObjectBuilder response = Json.createObjectBuilder() JsonObjectBuilder response = Json.createObjectBuilder()

View File

@@ -8,8 +8,11 @@ import com.sismics.security.UserPrincipal;
import com.sismics.util.filter.SecurityFilter; import com.sismics.util.filter.SecurityFilter;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam; import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context; import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.security.Principal; import java.security.Principal;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@@ -19,6 +22,8 @@ import java.util.Set;
* *
* @author jtremeaux * @author jtremeaux
*/ */
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_JSON)
public abstract class BaseResource { public abstract class BaseResource {
/** /**
* @apiDefine admin Admin * @apiDefine admin Admin

View File

@@ -0,0 +1,34 @@
package com.sismics.docs.rest.resource;
import org.glassfish.jersey.message.internal.ReaderWriter;
import javax.json.JsonObject;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
/**
* When a JSON-based exception is thrown but a JSON response is not expected,
* set the media type of the response as plain text.
*/
@Provider
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public class DocsMessageBodyWriter implements MessageBodyWriter<JsonObject> {
@Override
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return true;
}
@Override
public void writeTo(JsonObject o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
ReaderWriter.writeToAsString(o.toString(), entityStream, MediaType.TEXT_PLAIN_TYPE);
}
}

View File

@@ -27,11 +27,13 @@ import com.sismics.rest.exception.ClientException;
import com.sismics.rest.exception.ForbiddenClientException; import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.exception.ServerException; import com.sismics.rest.exception.ServerException;
import com.sismics.rest.util.AclUtil; import com.sismics.rest.util.AclUtil;
import com.sismics.rest.util.RestUtil;
import com.sismics.rest.util.ValidationUtil; import com.sismics.rest.util.ValidationUtil;
import com.sismics.util.EmailUtil; import com.sismics.util.EmailUtil;
import com.sismics.util.JsonUtil; import com.sismics.util.JsonUtil;
import com.sismics.util.context.ThreadLocalContext; import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.mime.MimeType; import com.sismics.util.mime.MimeType;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
import org.glassfish.jersey.media.multipart.FormDataBodyPart; import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataParam; import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -65,6 +67,21 @@ import java.util.*;
*/ */
@Path("/document") @Path("/document")
public class DocumentResource extends BaseResource { public class DocumentResource extends BaseResource {
protected static final DateTimeParser YEAR_PARSER = DateTimeFormat.forPattern("yyyy").getParser();
protected static final DateTimeParser MONTH_PARSER = DateTimeFormat.forPattern("yyyy-MM").getParser();
protected static final DateTimeParser DAY_PARSER = DateTimeFormat.forPattern("yyyy-MM-dd").getParser();
private static final DateTimeFormatter DAY_FORMATTER = new DateTimeFormatter(null, DAY_PARSER);
private static final DateTimeFormatter MONTH_FORMATTER = new DateTimeFormatter(null, MONTH_PARSER);
private static final DateTimeFormatter YEAR_FORMATTER = new DateTimeFormatter(null, YEAR_PARSER);
private static final DateTimeParser[] DATE_PARSERS = new DateTimeParser[]{
YEAR_PARSER,
MONTH_PARSER,
DAY_PARSER};
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder().append( null, DATE_PARSERS).toFormatter();
/** /**
* Returns a document. * Returns a document.
* *
@@ -73,6 +90,7 @@ public class DocumentResource extends BaseResource {
* @apiGroup Document * @apiGroup Document
* @apiParam {String} id Document ID * @apiParam {String} id Document ID
* @apiParam {String} share Share ID * @apiParam {String} share Share ID
* @apiParam {Booleans} files If true includes files information
* @apiSuccess {String} id ID * @apiSuccess {String} id ID
* @apiSuccess {String} title Title * @apiSuccess {String} title Title
* @apiSuccess {String} description Description * @apiSuccess {String} description Description
@@ -119,6 +137,12 @@ public class DocumentResource extends BaseResource {
* @apiSuccess {String} route_step.name Route step name * @apiSuccess {String} route_step.name Route step name
* @apiSuccess {String="APPROVE", "VALIDATE"} route_step.type Route step type * @apiSuccess {String="APPROVE", "VALIDATE"} route_step.type Route step type
* @apiSuccess {Boolean} route_step.transitionable True if the route step is actionable by the current user * @apiSuccess {Boolean} route_step.transitionable True if the route step is actionable by the current user
* @apiSuccess {Object[]} files List of files
* @apiSuccess {String} files.id ID
* @apiSuccess {String} files.name File name
* @apiSuccess {String} files.version Zero-based version number
* @apiSuccess {String} files.mimetype MIME type
* @apiSuccess {String} files.create_date Create date (timestamp)
* @apiError (client) NotFound Document not found * @apiError (client) NotFound Document not found
* @apiPermission none * @apiPermission none
* @apiVersion 1.5.0 * @apiVersion 1.5.0
@@ -131,7 +155,8 @@ public class DocumentResource extends BaseResource {
@Path("{id: [a-z0-9\\-]+}") @Path("{id: [a-z0-9\\-]+}")
public Response get( public Response get(
@PathParam("id") String documentId, @PathParam("id") String documentId,
@QueryParam("share") String shareId) { @QueryParam("share") String shareId,
@QueryParam("files") Boolean files) {
authenticate(); authenticate();
DocumentDao documentDao = new DocumentDao(); DocumentDao documentDao = new DocumentDao();
@@ -240,6 +265,19 @@ public class DocumentResource extends BaseResource {
// Add custom metadata // Add custom metadata
MetadataUtil.addMetadata(document, documentId); MetadataUtil.addMetadata(document, documentId);
// Add files
if (Boolean.TRUE == files) {
FileDao fileDao = new FileDao();
List<File> fileList = fileDao.getByDocumentsIds(Collections.singleton(documentId));
JsonArrayBuilder filesArrayBuilder = Json.createArrayBuilder();
for (File fileDb : fileList) {
filesArrayBuilder.add(RestUtil.fileToJsonObjectBuilder(fileDb));
}
document.add("files", filesArrayBuilder);
}
return Response.ok().entity(document.build()).build(); return Response.ok().entity(document.build()).build();
} }
@@ -326,7 +364,8 @@ public class DocumentResource extends BaseResource {
* @apiParam {String} offset Start at this index * @apiParam {String} offset Start at this index
* @apiParam {Number} sort_column Column index to sort on * @apiParam {Number} sort_column Column index to sort on
* @apiParam {Boolean} asc If true, sort in ascending order * @apiParam {Boolean} asc If true, sort in ascending order
* @apiParam {String} search Search query * @apiParam {String} search Search query (see "Document search syntax" on the top of the page for explanations)
* @apiParam {Booleans} files If true includes files information
* @apiSuccess {Number} total Total number of documents * @apiSuccess {Number} total Total number of documents
* @apiSuccess {Object[]} documents List of documents * @apiSuccess {Object[]} documents List of documents
* @apiSuccess {String} documents.id ID * @apiSuccess {String} documents.id ID
@@ -345,6 +384,12 @@ public class DocumentResource extends BaseResource {
* @apiSuccess {String} documents.tags.id ID * @apiSuccess {String} documents.tags.id ID
* @apiSuccess {String} documents.tags.name Name * @apiSuccess {String} documents.tags.name Name
* @apiSuccess {String} documents.tags.color Color * @apiSuccess {String} documents.tags.color Color
* @apiSuccess {Object[]} documents.files List of files
* @apiSuccess {String} documents.files.id ID
* @apiSuccess {String} documents.files.name File name
* @apiSuccess {String} documents.files.version Zero-based version number
* @apiSuccess {String} documents.files.mimetype MIME type
* @apiSuccess {String} documents.files.create_date Create date (timestamp)
* @apiSuccess {String[]} suggestions List of search suggestions * @apiSuccess {String[]} suggestions List of search suggestions
* @apiError (client) ForbiddenError Access denied * @apiError (client) ForbiddenError Access denied
* @apiError (server) SearchError Error searching in documents * @apiError (server) SearchError Error searching in documents
@@ -356,6 +401,7 @@ public class DocumentResource extends BaseResource {
* @param sortColumn Sort column * @param sortColumn Sort column
* @param asc Sorting * @param asc Sorting
* @param search Search query * @param search Search query
* @param files Files list
* @return Response * @return Response
*/ */
@GET @GET
@@ -365,7 +411,8 @@ public class DocumentResource extends BaseResource {
@QueryParam("offset") Integer offset, @QueryParam("offset") Integer offset,
@QueryParam("sort_column") Integer sortColumn, @QueryParam("sort_column") Integer sortColumn,
@QueryParam("asc") Boolean asc, @QueryParam("asc") Boolean asc,
@QueryParam("search") String search) { @QueryParam("search") String search,
@QueryParam("files") Boolean files) {
if (!authenticate()) { if (!authenticate()) {
throw new ForbiddenClientException(); throw new ForbiddenClientException();
} }
@@ -385,6 +432,14 @@ public class DocumentResource extends BaseResource {
throw new ServerException("SearchError", "Error searching in documents", e); throw new ServerException("SearchError", "Error searching in documents", e);
} }
// Find the files of the documents
List<File> filesList = null;
if (Boolean.TRUE == files) {
Iterable<String> documentsIds = CollectionUtils.collect(paginatedList.getResultList(), DocumentDto::getId);
FileDao fileDao = new FileDao();
filesList = fileDao.getByDocumentsIds(documentsIds);
}
for (DocumentDto documentDto : paginatedList.getResultList()) { for (DocumentDto documentDto : paginatedList.getResultList()) {
// Get tags accessible by the current user on this document // Get tags accessible by the current user on this document
List<TagDto> tagDtoList = tagDao.findByCriteria(new TagCriteria() List<TagDto> tagDtoList = tagDao.findByCriteria(new TagCriteria()
@@ -398,7 +453,7 @@ public class DocumentResource extends BaseResource {
.add("color", tagDto.getColor())); .add("color", tagDto.getColor()));
} }
documents.add(Json.createObjectBuilder() JsonObjectBuilder documentObjectBuilder = Json.createObjectBuilder()
.add("id", documentDto.getId()) .add("id", documentDto.getId())
.add("highlight", JsonUtil.nullable(documentDto.getHighlight())) .add("highlight", JsonUtil.nullable(documentDto.getHighlight()))
.add("file_id", JsonUtil.nullable(documentDto.getFileId())) .add("file_id", JsonUtil.nullable(documentDto.getFileId()))
@@ -411,7 +466,17 @@ public class DocumentResource extends BaseResource {
.add("active_route", documentDto.isActiveRoute()) .add("active_route", documentDto.isActiveRoute())
.add("current_step_name", JsonUtil.nullable(documentDto.getCurrentStepName())) .add("current_step_name", JsonUtil.nullable(documentDto.getCurrentStepName()))
.add("file_count", documentDto.getFileCount()) .add("file_count", documentDto.getFileCount())
.add("tags", tags)); .add("tags", tags);
if (Boolean.TRUE == files) {
JsonArrayBuilder filesArrayBuilder = Json.createArrayBuilder();
// Find files matching the document
Collection<File> filesOfDocument = CollectionUtils.select(filesList, file -> file.getDocumentId().equals(documentDto.getId()));
for (File fileDb : filesOfDocument) {
filesArrayBuilder.add(RestUtil.fileToJsonObjectBuilder(fileDb));
}
documentObjectBuilder.add("files", filesArrayBuilder);
}
documents.add(documentObjectBuilder);
} }
JsonArrayBuilder suggestions = Json.createArrayBuilder(); JsonArrayBuilder suggestions = Json.createArrayBuilder();
@@ -442,16 +507,8 @@ public class DocumentResource extends BaseResource {
TagDao tagDao = new TagDao(); TagDao tagDao = new TagDao();
List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria().setTargetIdList(getTargetIdList(null)), null); List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria().setTargetIdList(getTargetIdList(null)), null);
UserDao userDao = new UserDao(); UserDao userDao = new UserDao();
DateTimeParser[] parsers = {
DateTimeFormat.forPattern("yyyy").getParser(),
DateTimeFormat.forPattern("yyyy-MM").getParser(),
DateTimeFormat.forPattern("yyyy-MM-dd").getParser() };
DateTimeFormatter yearFormatter = new DateTimeFormatter(null, parsers[0]);
DateTimeFormatter monthFormatter = new DateTimeFormatter(null, parsers[1]);
DateTimeFormatter dayFormatter = new DateTimeFormatter(null, parsers[2]);
DateTimeFormatter formatter = new DateTimeFormatterBuilder().append( null, parsers ).toFormatter();
String[] criteriaList = search.split(" *"); String[] criteriaList = search.split(" +");
List<String> query = new ArrayList<>(); List<String> query = new ArrayList<>();
List<String> fullQuery = new ArrayList<>(); List<String> fullQuery = new ArrayList<>();
for (String criteria : criteriaList) { for (String criteria : criteriaList) {
@@ -461,20 +518,16 @@ public class DocumentResource extends BaseResource {
fullQuery.add(criteria); fullQuery.add(criteria);
continue; continue;
} }
String paramName = params[0];
String paramValue = params[1];
switch (params[0]) { switch (paramName) {
case "tag": case "tag":
case "!tag": case "!tag":
// New tag criteria // New tag criteria
List<TagDto> tagDtoList = TagUtil.findByName(params[1], allTagDtoList); List<TagDto> tagDtoList = TagUtil.findByName(paramValue, allTagDtoList);
if (documentCriteria.getTagIdList() == null) {
documentCriteria.setTagIdList(new ArrayList<>());
}
if (documentCriteria.getExcludedTagIdList() == null) {
documentCriteria.setExcludedTagIdList(new ArrayList<>());
}
if (tagDtoList.isEmpty()) { if (tagDtoList.isEmpty()) {
// No tag found, the request must returns nothing // No tag found, the request must return nothing
documentCriteria.getTagIdList().add(Lists.newArrayList(UUID.randomUUID().toString())); documentCriteria.getTagIdList().add(Lists.newArrayList(UUID.randomUUID().toString()));
} else { } else {
List<String> tagIdList = Lists.newArrayList(); List<String> tagIdList = Lists.newArrayList();
@@ -485,7 +538,7 @@ public class DocumentResource extends BaseResource {
tagIdList.add(childrenTagDto.getId()); tagIdList.add(childrenTagDto.getId());
} }
} }
if (params[0].startsWith("!")) { if (paramName.startsWith("!")) {
documentCriteria.getExcludedTagIdList().add(tagIdList); documentCriteria.getExcludedTagIdList().add(tagIdList);
} else { } else {
documentCriteria.getTagIdList().add(tagIdList); documentCriteria.getTagIdList().add(tagIdList);
@@ -498,9 +551,9 @@ public class DocumentResource extends BaseResource {
case "ubefore": case "ubefore":
// New date span criteria // New date span criteria
try { try {
boolean isUpdated = params[0].startsWith("u"); boolean isUpdated = paramName.startsWith("u");
DateTime date = formatter.parseDateTime(params[1]); DateTime date = DATE_FORMATTER.parseDateTime(paramValue);
if (params[0].endsWith("before")) { if (paramName.endsWith("before")) {
if (isUpdated) documentCriteria.setUpdateDateMax(date.toDate()); if (isUpdated) documentCriteria.setUpdateDateMax(date.toDate());
else documentCriteria.setCreateDateMax(date.toDate()); else documentCriteria.setCreateDateMax(date.toDate());
} else { } else {
@@ -516,11 +569,11 @@ public class DocumentResource extends BaseResource {
case "uat": case "uat":
case "at": case "at":
// New specific date criteria // New specific date criteria
boolean isUpdated = params[0].startsWith("u");
try { try {
boolean isUpdated = params[0].startsWith("u"); switch (paramValue.length()) {
switch (params[1].length()) {
case 10: { case 10: {
DateTime date = dayFormatter.parseDateTime(params[1]); DateTime date = DATE_FORMATTER.parseDateTime(params[1]);
if (isUpdated) { if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate()); documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusDays(1).minusSeconds(1).toDate()); documentCriteria.setUpdateDateMax(date.plusDays(1).minusSeconds(1).toDate());
@@ -531,7 +584,7 @@ public class DocumentResource extends BaseResource {
break; break;
} }
case 7: { case 7: {
DateTime date = monthFormatter.parseDateTime(params[1]); DateTime date = MONTH_FORMATTER.parseDateTime(params[1]);
if (isUpdated) { if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate()); documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusMonths(1).minusSeconds(1).toDate()); documentCriteria.setUpdateDateMax(date.plusMonths(1).minusSeconds(1).toDate());
@@ -542,7 +595,7 @@ public class DocumentResource extends BaseResource {
break; break;
} }
case 4: { case 4: {
DateTime date = yearFormatter.parseDateTime(params[1]); DateTime date = YEAR_FORMATTER.parseDateTime(params[1]);
if (isUpdated) { if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate()); documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusYears(1).minusSeconds(1).toDate()); documentCriteria.setUpdateDateMax(date.plusYears(1).minusSeconds(1).toDate());
@@ -551,6 +604,10 @@ public class DocumentResource extends BaseResource {
documentCriteria.setCreateDateMax(date.plusYears(1).minusSeconds(1).toDate()); documentCriteria.setCreateDateMax(date.plusYears(1).minusSeconds(1).toDate());
} }
break; break;
} default: {
// Invalid format, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
} }
} }
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
@@ -561,25 +618,26 @@ public class DocumentResource extends BaseResource {
break; break;
case "shared": case "shared":
// New shared state criteria // New shared state criteria
documentCriteria.setShared(params[1].equals("yes")); documentCriteria.setShared(paramValue.equals("yes"));
break; break;
case "lang": case "lang":
// New language criteria // New language criteria
if (Constants.SUPPORTED_LANGUAGES.contains(params[1])) { if (Constants.SUPPORTED_LANGUAGES.contains(paramValue)) {
documentCriteria.setLanguage(params[1]); documentCriteria.setLanguage(paramValue);
} else { } else {
// Unsupported language, returns no documents
documentCriteria.setLanguage(UUID.randomUUID().toString()); documentCriteria.setLanguage(UUID.randomUUID().toString());
} }
break; break;
case "mime": case "mime":
// New mime type criteria // New mime type criteria
documentCriteria.setMimeType(params[1]); documentCriteria.setMimeType(paramValue);
break; break;
case "by": case "by":
// New creator criteria // New creator criteria
User user = userDao.getActiveByUsername(params[1]); User user = userDao.getActiveByUsername(paramValue);
if (user == null) { if (user == null) {
// This user doesn't exists, return nothing // This user doesn't exist, return nothing
documentCriteria.setCreatorId(UUID.randomUUID().toString()); documentCriteria.setCreatorId(UUID.randomUUID().toString());
} else { } else {
// This user exists, search its documents // This user exists, search its documents
@@ -588,15 +646,19 @@ public class DocumentResource extends BaseResource {
break; break;
case "workflow": case "workflow":
// New shared state criteria // New shared state criteria
documentCriteria.setActiveRoute(params[1].equals("me")); documentCriteria.setActiveRoute(paramValue.equals("me"));
break; break;
case "simple": case "simple":
// New simple search criteria // New simple search criteria
query.add(params[1]); query.add(paramValue);
break; break;
case "full": case "full":
// New fulltext search criteria // New fulltext search criteria
fullQuery.add(params[1]); fullQuery.add(paramValue);
break;
case "title":
// New title criteria
documentCriteria.setTitle(paramValue);
break; break;
default: default:
fullQuery.add(criteria); fullQuery.add(criteria);

View File

@@ -21,6 +21,7 @@ import com.sismics.docs.core.util.FileUtil;
import com.sismics.rest.exception.ClientException; import com.sismics.rest.exception.ClientException;
import com.sismics.rest.exception.ForbiddenClientException; import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.exception.ServerException; import com.sismics.rest.exception.ServerException;
import com.sismics.rest.util.RestUtil;
import com.sismics.rest.util.ValidationUtil; import com.sismics.rest.util.ValidationUtil;
import com.sismics.util.HttpUtil; import com.sismics.util.HttpUtil;
import com.sismics.util.JsonUtil; import com.sismics.util.JsonUtil;
@@ -42,6 +43,7 @@ import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
import java.net.URISyntaxException; import java.net.URISyntaxException;
import java.net.URLDecoder; import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Paths; import java.nio.file.Paths;
import java.nio.file.StandardCopyOption; import java.nio.file.StandardCopyOption;
@@ -111,10 +113,12 @@ public class FileResource extends BaseResource {
} }
// Keep unencrypted data temporary on disk // Keep unencrypted data temporary on disk
String name = fileBodyPart.getContentDisposition() != null ?
URLDecoder.decode(fileBodyPart.getContentDisposition().getFileName(), StandardCharsets.UTF_8) : null;
java.nio.file.Path unencryptedFile; java.nio.file.Path unencryptedFile;
long fileSize; long fileSize;
try { try {
unencryptedFile = AppContext.getInstance().getFileService().createTemporaryFile(); unencryptedFile = AppContext.getInstance().getFileService().createTemporaryFile(name);
Files.copy(fileBodyPart.getValueAs(InputStream.class), unencryptedFile, StandardCopyOption.REPLACE_EXISTING); Files.copy(fileBodyPart.getValueAs(InputStream.class), unencryptedFile, StandardCopyOption.REPLACE_EXISTING);
fileSize = Files.size(unencryptedFile); fileSize = Files.size(unencryptedFile);
} catch (IOException e) { } catch (IOException e) {
@@ -122,8 +126,6 @@ public class FileResource extends BaseResource {
} }
try { try {
String name = fileBodyPart.getContentDisposition() != null ?
URLDecoder.decode(fileBodyPart.getContentDisposition().getFileName(), "UTF-8") : null;
String fileId = FileUtil.createFile(name, previousFileId, unencryptedFile, fileSize, documentDto == null ? String fileId = FileUtil.createFile(name, previousFileId, unencryptedFile, fileSize, documentDto == null ?
null : documentDto.getLanguage(), principal.getId(), documentId); null : documentDto.getLanguage(), principal.getId(), documentId);
@@ -427,27 +429,13 @@ public class FileResource extends BaseResource {
} }
FileDao fileDao = new FileDao(); FileDao fileDao = new FileDao();
List<File> fileList = fileDao.getByDocumentId(principal.getId(), documentId);
JsonArrayBuilder files = Json.createArrayBuilder(); JsonArrayBuilder files = Json.createArrayBuilder();
for (File fileDb : fileList) { for (File fileDb : fileDao.getByDocumentId(principal.getId(), documentId)) {
try { files.add(RestUtil.fileToJsonObjectBuilder(fileDb));
files.add(Json.createObjectBuilder()
.add("id", fileDb.getId())
.add("processing", FileUtil.isProcessingFile(fileDb.getId()))
.add("name", JsonUtil.nullable(fileDb.getName()))
.add("version", fileDb.getVersion())
.add("mimetype", fileDb.getMimeType())
.add("document_id", JsonUtil.nullable(fileDb.getDocumentId()))
.add("create_date", fileDb.getCreateDate().getTime())
.add("size", Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId()))));
} catch (IOException e) {
throw new ServerException("FileError", "Unable to get the size of " + fileDb.getId(), e);
}
} }
JsonObjectBuilder response = Json.createObjectBuilder() JsonObjectBuilder response = Json.createObjectBuilder()
.add("files", files); .add("files", files);
return Response.ok().entity(response.build()).build(); return Response.ok().entity(response.build()).build();
} }
@@ -587,6 +575,7 @@ public class FileResource extends BaseResource {
*/ */
@GET @GET
@Path("{id: [a-z0-9\\-]+}/data") @Path("{id: [a-z0-9\\-]+}/data")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response data( public Response data(
@PathParam("id") final String fileId, @PathParam("id") final String fileId,
@QueryParam("share") String shareId, @QueryParam("share") String shareId,
@@ -676,23 +665,24 @@ public class FileResource extends BaseResource {
/** /**
* Returns all files from a document, zipped. * Returns all files from a document, zipped.
* *
* @api {get} /file/zip Get zipped files * @api {get} /file/zip Returns all files from a document, zipped.
* @apiName GetFileZip * @apiName GetFileZip
* @apiGroup File * @apiGroup File
* @apiParam {String} id Document ID * @apiParam {String} id Document ID
* @apiParam {String} share Share ID * @apiParam {String} share Share ID
* @apiSuccess {Object} file The ZIP file is the whole response * @apiSuccess {Object} file The ZIP file is the whole response
* @apiError (client) NotFound Document not found * @apiError (client) NotFoundException Document not found
* @apiError (server) InternalServerError Error creating the ZIP file * @apiError (server) InternalServerError Error creating the ZIP file
* @apiPermission none * @apiPermission none
* @apiVersion 1.5.0 * @apiVersion 1.5.0
* *
* @param documentId Document ID * @param documentId Document ID
* @param shareId Share ID
* @return Response * @return Response
*/ */
@GET @GET
@Path("zip") @Path("zip")
@Produces(MediaType.APPLICATION_OCTET_STREAM) @Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.TEXT_PLAIN})
public Response zip( public Response zip(
@QueryParam("id") String documentId, @QueryParam("id") String documentId,
@QueryParam("share") String shareId) { @QueryParam("share") String shareId) {
@@ -705,10 +695,44 @@ public class FileResource extends BaseResource {
throw new NotFoundException(); throw new NotFoundException();
} }
// Get files and user associated with this document // Get files associated with this document
FileDao fileDao = new FileDao(); FileDao fileDao = new FileDao();
final UserDao userDao = new UserDao();
final List<File> fileList = fileDao.getByDocumentId(principal.getId(), documentId); final List<File> fileList = fileDao.getByDocumentId(principal.getId(), documentId);
String zipFileName = documentDto.getTitle().replaceAll("\\W+", "_");
return sendZippedFiles(zipFileName, fileList);
}
/**
* Returns a list of files, zipped
*
* @api {post} /file/zip Returns a list of files, zipped
* @apiName GetFilesZip
* @apiGroup File
* @apiParam {String[]} files IDs
* @apiSuccess {Object} file The ZIP file is the whole response
* @apiError (client) NotFoundException Files not found
* @apiError (server) InternalServerError Error creating the ZIP file
* @apiPermission none
* @apiVersion 1.11.0
*
* @param filesIdsList Files IDs
* @return Response
*/
@POST
@Path("zip")
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.TEXT_PLAIN})
public Response zip(
@FormParam("files") List<String> filesIdsList) {
authenticate();
List<File> fileList = findFiles(filesIdsList);
return sendZippedFiles("files", fileList);
}
/**
* Sent the content of a list of files.
*/
private Response sendZippedFiles(String zipFileName, List<File> fileList) {
final UserDao userDao = new UserDao();
// Create the ZIP stream // Create the ZIP stream
StreamingOutput stream = outputStream -> { StreamingOutput stream = outputStream -> {
@@ -739,7 +763,7 @@ public class FileResource extends BaseResource {
// Write to the output // Write to the output
return Response.ok(stream) return Response.ok(stream)
.header("Content-Type", "application/zip") .header("Content-Type", "application/zip")
.header("Content-Disposition", "attachment; filename=\"" + documentDto.getTitle().replaceAll("\\W+", "_") + ".zip\"") .header("Content-Disposition", "attachment; filename=\"" + zipFileName + ".zip\"")
.build(); .build();
} }
@@ -756,7 +780,32 @@ public class FileResource extends BaseResource {
if (file == null) { if (file == null) {
throw new NotFoundException(); throw new NotFoundException();
} }
checkFileAccessible(shareId, file);
return file;
}
/**
* Find a list of files with access rights checking.
*
* @param filesIds Files IDs
* @return List<File>
*/
private List<File> findFiles(List<String> filesIds) {
FileDao fileDao = new FileDao();
List<File> files = fileDao.getFiles(filesIds);
for (File file : files) {
checkFileAccessible(null, file);
}
return files;
}
/**
* Check if a file is accessible to the current user
* @param shareId Share ID
* @param file
*/
private void checkFileAccessible(String shareId, File file) {
if (file.getDocumentId() == null) { if (file.getDocumentId() == null) {
// It's an orphan file // It's an orphan file
if (!file.getUserId().equals(principal.getId())) { if (!file.getUserId().equals(principal.getId())) {
@@ -770,6 +819,5 @@ public class FileResource extends BaseResource {
throw new ForbiddenClientException(); throw new ForbiddenClientException();
} }
} }
return file;
} }
} }

View File

@@ -313,7 +313,7 @@ public class GroupResource extends BaseResource {
* @return Response * @return Response
*/ */
@DELETE @DELETE
@Path("{groupName: [a-zA-Z0-9_]+}/{username: [a-zA-Z0-9_]+}") @Path("{groupName: [a-zA-Z0-9_]+}/{username: [a-zA-Z0-9_@\\.]+}")
public Response removeMember(@PathParam("groupName") String groupName, public Response removeMember(@PathParam("groupName") String groupName,
@PathParam("username") String username) { @PathParam("username") String username) {
if (!authenticate()) { if (!authenticate()) {

View File

@@ -88,7 +88,7 @@ public class UserResource extends BaseResource {
// Validate the input data // Validate the input data
username = ValidationUtil.validateLength(username, "username", 3, 50); username = ValidationUtil.validateLength(username, "username", 3, 50);
ValidationUtil.validateAlphanumeric(username, "username"); ValidationUtil.validateUsername(username, "username");
password = ValidationUtil.validateLength(password, "password", 8, 50); password = ValidationUtil.validateLength(password, "password", 8, 50);
email = ValidationUtil.validateLength(email, "email", 1, 100); email = ValidationUtil.validateLength(email, "email", 1, 100);
Long storageQuota = ValidationUtil.validateLong(storageQuotaStr, "storage_quota"); Long storageQuota = ValidationUtil.validateLong(storageQuotaStr, "storage_quota");
@@ -195,7 +195,7 @@ public class UserResource extends BaseResource {
* @return Response * @return Response
*/ */
@POST @POST
@Path("{username: [a-zA-Z0-9_]+}") @Path("{username: [a-zA-Z0-9_@\\.]+}")
public Response update( public Response update(
@PathParam("username") String username, @PathParam("username") String username,
@FormParam("password") String password, @FormParam("password") String password,
@@ -511,7 +511,7 @@ public class UserResource extends BaseResource {
* @return Response * @return Response
*/ */
@DELETE @DELETE
@Path("{username: [a-zA-Z0-9_]+}") @Path("{username: [a-zA-Z0-9_@\\.]+}")
public Response delete(@PathParam("username") String username) { public Response delete(@PathParam("username") String username) {
if (!authenticate()) { if (!authenticate()) {
throw new ForbiddenClientException(); throw new ForbiddenClientException();
@@ -591,7 +591,7 @@ public class UserResource extends BaseResource {
* @return Response * @return Response
*/ */
@POST @POST
@Path("{username: [a-zA-Z0-9_]+}/disable_totp") @Path("{username: [a-zA-Z0-9_@\\.]+}/disable_totp")
public Response disableTotpUsername(@PathParam("username") String username) { public Response disableTotpUsername(@PathParam("username") String username) {
if (!authenticate()) { if (!authenticate()) {
throw new ForbiddenClientException(); throw new ForbiddenClientException();
@@ -713,7 +713,7 @@ public class UserResource extends BaseResource {
* @return Response * @return Response
*/ */
@GET @GET
@Path("{username: [a-zA-Z0-9_]+}") @Path("{username: [a-zA-Z0-9_@\\.]+}")
@Produces(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON)
public Response view(@PathParam("username") String username) { public Response view(@PathParam("username") String username) {
if (!authenticate()) { if (!authenticate()) {
@@ -1064,7 +1064,6 @@ public class UserResource extends BaseResource {
* @apiGroup User * @apiGroup User
* @apiParam {String} username Username * @apiParam {String} username Username
* @apiSuccess {String} status Status OK * @apiSuccess {String} status Status OK
* @apiError (client) UserNotFound The user is not found
* @apiError (client) ValidationError Validation error * @apiError (client) ValidationError Validation error
* @apiPermission none * @apiPermission none
* @apiVersion 1.5.0 * @apiVersion 1.5.0
@@ -1081,11 +1080,16 @@ public class UserResource extends BaseResource {
// Validate input data // Validate input data
ValidationUtil.validateStringNotBlank("username", username); ValidationUtil.validateStringNotBlank("username", username);
// Prepare response
Response response = Response.ok().entity(Json.createObjectBuilder()
.add("status", "ok")
.build()).build();
// Check for user existence // Check for user existence
UserDao userDao = new UserDao(); UserDao userDao = new UserDao();
List<UserDto> userDtoList = userDao.findByCriteria(new UserCriteria().setUserName(username), null); List<UserDto> userDtoList = userDao.findByCriteria(new UserCriteria().setUserName(username), null);
if (userDtoList.isEmpty()) { if (userDtoList.isEmpty()) {
throw new ClientException("UserNotFound", "User not found: " + username); return response;
} }
UserDto user = userDtoList.get(0); UserDto user = userDtoList.get(0);
@@ -1102,9 +1106,7 @@ public class UserResource extends BaseResource {
AppContext.getInstance().getMailEventBus().post(passwordLostEvent); AppContext.getInstance().getMailEventBus().post(passwordLostEvent);
// Always return OK // Always return OK
JsonObjectBuilder response = Json.createObjectBuilder() return response;
.add("status", "ok");
return Response.ok().entity(response.build()).build();
} }
/** /**

View File

@@ -10,7 +10,7 @@ The base URL depends on your server. If your instance of Teedy is accessible thr
`https://teedy.mycompany.com`, then the base API URL is `https://teedy.mycompany.com/api`. `https://teedy.mycompany.com`, then the base API URL is `https://teedy.mycompany.com/api`.
## Verbs and status codes ## Verbs and status codes
The API uses restful verbs. The API uses RESTful verbs.
| Verb | Description | | Verb | Description |
|---|---| |---|---|
@@ -47,3 +47,42 @@ A call to this API with a given `auth_token` cookie will make it unusable for ot
``` ```
curl -i -X POST -H "Cookie: auth_token=64085630-2ae6-415c-9a92-4b22c107eaa4" https://docs.mycompany.com/api/user/logout curl -i -X POST -H "Cookie: auth_token=64085630-2ae6-415c-9a92-4b22c107eaa4" https://docs.mycompany.com/api/user/logout
``` ```
## Document search syntax
The `/api/document/list` endpoint uses a String `search` parameter.
This parameter is split in segments using the space character (the other whitespace characters are not considered).
If a segment contains exactly one colon (`:`), it will be used as a field criterion (see below).
In other cases (zero or more than one colon), the segment will be used as a search criteria for all fields including the document's files content.
### Search fields
If a search `VALUE` is considered invalid, the search result will be empty.
* Content
* `full:VALUE`: `VALUE` is used as search criteria for all fields, including the document's files content
* `simple:VALUE`: `VALUE` is used as a search criteria for all fields except the document's files content
* Date
* `after:VALUE`: the document must have been created after or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `at:VALUE`: the document must have been created at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `before:VALUE`: the document must have been created before or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `uafter:VALUE`: the document must have been last updated after or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `uat:VALUE`: the document must have been last updated at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `ubefore:VALUE`: the document must have been last updated before or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* Language
* `lang:VALUE`: the document must be of the specified language (example: `en`)
* Mime
* `mime:VALUE`: the document must be of the specified mime type (example: `image/png`)
* Shared
* `shared:VALUE`: if `VALUE` is `yes` the document must be shared, for other `VALUE`s the criterion is ignored
* Tags
* `tag:VALUE`: the document must contain a tag or a child of a tag that starts with `VALUE`, case is ignored
* `!tag:VALUE`: the document must not contain a tag or a child of a tag that starts with `VALUE`, case is ignored
* Title
* `title:VALUE`: the title of the document must be `VALUE`
* User
* `by:VALUE`: the document creator's username must be `VALUE` with an exact match, the user must not be deleted
* Workflow
* `workflow:VALUE`: if `VALUE` is `me` the document must have an active route, for other `VALUE`s the criterion is ignored

View File

@@ -424,7 +424,7 @@ angular.module('docs',
// Configuring Angular Translate // Configuring Angular Translate
$translateProvider $translateProvider
.useSanitizeValueStrategy(null) .useSanitizeValueStrategy('escapeParameters')
.useStaticFilesLoader({ .useStaticFilesLoader({
prefix: 'locale/', prefix: 'locale/',
suffix: '.json?@build.date@' suffix: '.json?@build.date@'
@@ -462,6 +462,9 @@ angular.module('docs',
// Configuring $http to act like jQuery.ajax // Configuring $http to act like jQuery.ajax
$httpProvider.defaults.headers.post['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8'; $httpProvider.defaults.headers.post['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8';
$httpProvider.defaults.headers.put['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8'; $httpProvider.defaults.headers.put['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8';
$httpProvider.defaults.headers.delete = {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8'
};
$httpProvider.defaults.transformRequest = [function(data) { $httpProvider.defaults.transformRequest = [function(data) {
var param = function(obj) { var param = function(obj) {
var query = ''; var query = '';
@@ -541,7 +544,9 @@ angular.module('docs',
{ key: 'swe', label: 'Svenska' }, { key: 'swe', label: 'Svenska' },
{ key: 'lav', label: 'Latviešu' }, { key: 'lav', label: 'Latviešu' },
{ key: 'dan', label: 'Dansk' }, { key: 'dan', label: 'Dansk' },
{ key: 'nor', label: 'Norsk' } { key: 'nor', label: 'Norsk' },
{ key: 'vie', label: 'Tiếng Việt' },
{ key: 'ces', label: 'Czech' }
]; ];
}) })
/** /**

View File

@@ -56,7 +56,7 @@ angular.module('share',
// Configuring Angular Translate // Configuring Angular Translate
$translateProvider $translateProvider
.useSanitizeValueStrategy(null) .useSanitizeValueStrategy('escapeParameters')
.useStaticFilesLoader({ .useStaticFilesLoader({
prefix: 'locale/', prefix: 'locale/',
suffix: '.json?@build.date@' suffix: '.json?@build.date@'
@@ -88,6 +88,9 @@ angular.module('share',
// Configuring $http to act like jQuery.ajax // Configuring $http to act like jQuery.ajax
$httpProvider.defaults.headers.post['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8'; $httpProvider.defaults.headers.post['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8';
$httpProvider.defaults.headers.put['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8'; $httpProvider.defaults.headers.put['Content-Type'] = 'application/x-www-form-urlencoded;charset=utf-8';
$httpProvider.defaults.headers.delete = {
'Content-Type': 'application/x-www-form-urlencoded;charset=utf-8'
};
$httpProvider.defaults.transformRequest = [function(data) { $httpProvider.defaults.transformRequest = [function(data) {
var param = function(obj) { var param = function(obj) {
var query = ''; var query = '';

View File

@@ -32,24 +32,24 @@
"nav_documents": "Dokumente", "nav_documents": "Dokumente",
"nav_tags": "Tags", "nav_tags": "Tags",
"nav_users_groups": "Benutzer & Gruppen", "nav_users_groups": "Benutzer & Gruppen",
"error_info": "{{ count }} neuer Fehler{{ count > 1 ? 's' : '' }}", "error_info": "{{ count }} {{ count > 1 ? 'neue' : 'neuer' }} Fehler",
"logged_as": "Eingeloggt als {{ username }}", "logged_as": "Eingeloggt als {{ username }}",
"nav_settings": "Einstellungen", "nav_settings": "Einstellungen",
"logout": "Logout", "logout": "Logout",
"global_quota_warning": "<strong>Warnung!</strong> Der frei zur Verfügung stehende, maximale Speicherplatz ist fast erreicht bei {{ current | number: 0 }}MB ({{ percent | number: 1 }}%) verwendet {{ total | number: 0 }}MB" "global_quota_warning": "<strong>Warnung!</strong> Der verfügbare Speicherplatz beträgt {{ total | number: 0 }}\u00A0MB, davon sind {{ current | number: 0 }}\u00A0MB ({{ percent | number: 1 }}\u00A0%) bereits verwendet"
}, },
"document": { "document": {
"navigation_up": "Eine Stufe höher", "navigation_up": "Eine Ebene höher",
"toggle_navigation": "Navigation ein-/ausblenden", "toggle_navigation": "Navigation ein-/ausblenden",
"display_mode_list": "Dokumente in Liste anzeigen", "display_mode_list": "Dokumente in Liste anzeigen",
"display_mode_grid": "Dokumente im Raster anzeigen", "display_mode_grid": "Dokumente im Raster anzeigen",
"search_simple": "Einfache Suche", "search_simple": "Einfache Suche",
"search_fulltext": "Volltext Suche", "search_fulltext": "Volltextsuche",
"search_creator": "Urheber", "search_creator": "Urheber",
"search_language": "Sprache", "search_language": "Sprache",
"search_before_date": "Vor diesem Datum", "search_before_date": "Vor diesem Datum",
"search_after_date": "Nach diesem Datum", "search_after_date": "Nach diesem Datum",
"search_before_update_date": "Bearbeitet bevor diesem Datum", "search_before_update_date": "Bearbeitet vor diesem Datum",
"search_after_update_date": "Bearbeitet nach diesem Datum", "search_after_update_date": "Bearbeitet nach diesem Datum",
"search_tags": "Tags", "search_tags": "Tags",
"search_shared": "Nur freigegebene Dokumente", "search_shared": "Nur freigegebene Dokumente",
@@ -83,9 +83,9 @@
"page_size_10": "10 pro Seite", "page_size_10": "10 pro Seite",
"page_size_20": "20 pro Seite", "page_size_20": "20 pro Seite",
"page_size_30": "30 pro Seite", "page_size_30": "30 pro Seite",
"upgrade_quota": "Fragen Sie Ihren Administrator, um Ihr Speicherplatz zu erweitern.", "upgrade_quota": "Fragen Sie Ihren Administrator, um Ihren Speicherplatz zu erweitern.",
"quota": "{{ current | number: 0 }}MB ({{ percent | number: 1 }}%) verwendet von {{ total | number: 0 }}MB", "quota": "{{ current | number: 0 }}\u00A0MB ({{ percent | number: 1 }}\u00A0%) verwendet von {{ total | number: 0 }}\u00A0MB",
"count": "{{ count }} Dokument{{ count > 1 ? 'e' : '' }} gefunden", "count": "{{ count }} {{ count > 1 ? 'Dokumente' : 'Dokument' }} gefunden",
"last_updated": "Zuletzt bearbeitet {{ date | timeAgo: dateFormat }}", "last_updated": "Zuletzt bearbeitet {{ date | timeAgo: dateFormat }}",
"view": { "view": {
"delete_comment_title": "Kommentar löschen", "delete_comment_title": "Kommentar löschen",
@@ -104,7 +104,7 @@
"add_comment": "Fügen sie einen Kommentar hinzu", "add_comment": "Fügen sie einen Kommentar hinzu",
"error_loading_comments": "Fehler beim Laden eines Kommentars", "error_loading_comments": "Fehler beim Laden eines Kommentars",
"workflow_current": "Aktueller Workflow-Status", "workflow_current": "Aktueller Workflow-Status",
"workflow_comment": "Fügen Sie einen Workflow Kommentar hinzu", "workflow_comment": "Fügen Sie einen Workflow-Kommentar hinzu",
"workflow_validated_title": "Workflow-Schritt validiert", "workflow_validated_title": "Workflow-Schritt validiert",
"workflow_validated_message": "Der Workflow-Schritt wurde erfolgreich validiert.", "workflow_validated_message": "Der Workflow-Schritt wurde erfolgreich validiert.",
"display_mode_list": "Dateien in Liste anzeigen", "display_mode_list": "Dateien in Liste anzeigen",
@@ -113,13 +113,13 @@
"content": "Inhalt", "content": "Inhalt",
"delete_file_title": "Datei löschen", "delete_file_title": "Datei löschen",
"delete_file_message": "Wollen Sie diese Datei wirklich löschen?", "delete_file_message": "Wollen Sie diese Datei wirklich löschen?",
"upload_pending": "Ausstehend...", "upload_pending": "Ausstehend\u2026",
"upload_progress": "Hochladen...", "upload_progress": "Hochladen\u2026",
"upload_error": "Fehler beim Hochladen", "upload_error": "Fehler beim Hochladen",
"upload_error_quota": "Maximaler Speicherplatz erreicht", "upload_error_quota": "Maximaler Speicherplatz erreicht",
"drop_zone": "Drag & Drop Dateien hierherziehen, um diese hochzuladen", "drop_zone": "Legen Sie Dateien hier ab, um sie hochzuladen",
"add_files": "Dateien hinzufügen", "add_files": "Dateien hinzufügen",
"file_processing_indicator": "Diese Datei wird gerade bearbeitet. Die Suche wird nicht verfügbar sein, bevor der Vorgang abgeschlossen ist.", "file_processing_indicator": "Diese Datei wird gerade verarbeitet. Die Suche ist erst verfügbar, wenn diese Verarbeitung abgeschlossen ist.",
"reprocess_file": "Diese Datei erneut verarbeiten", "reprocess_file": "Diese Datei erneut verarbeiten",
"upload_new_version": "Neue Version hochladen", "upload_new_version": "Neue Version hochladen",
"open_versions": "Versionshistorie anzeigen" "open_versions": "Versionshistorie anzeigen"
@@ -158,27 +158,27 @@
"title_placeholder": "Titel des Dokuments", "title_placeholder": "Titel des Dokuments",
"description_placeholder": "Zusammenfassung, Inhaltsverzeichnis oder Freitext", "description_placeholder": "Zusammenfassung, Inhaltsverzeichnis oder Freitext",
"new_files": "neue Dateien", "new_files": "neue Dateien",
"orphan_files": "+ {{ count }} Datei{{ count > 1 ? 's' : '' }}", "orphan_files": "+ {{ count }} {{ count > 1 ? 'Dateien' : 'Datei' }}",
"additional_metadata": "Weitere Metadaten", "additional_metadata": "Weitere Metadaten",
"subject_placeholder": "Schlüsselwörter, abstrakte Sätze oder Klassifizierungscodes", "subject_placeholder": "Schlüsselwörter, abstrakte Sätze oder Klassifizierungscodes",
"identifier_placeholder": "Eindeutiger Identifikator", "identifier_placeholder": "Eindeutiger Identifikator",
"publisher_placeholder": "Name der Person, Organisation oder Abteilung, von der aus das Dokument veröffentlicht wurde.", "publisher_placeholder": "Name der Person, Organisation oder Abteilung, von der aus das Dokument veröffentlicht wurde.",
"format_placeholder": "MIME-Typ oder physisches Format des Dokuments", "format_placeholder": "MIME-Typ oder physisches Format des Dokuments",
"source_placeholder": "Ressource, aus der das Dokument stammt", "source_placeholder": "Ressource, aus der das Dokument stammt",
"uploading_files": "Dateien hochladen..." "uploading_files": "Dateien hochladen\u2026"
}, },
"default": { "default": {
"upload_pending": "Ausstehend...", "upload_pending": "Ausstehend\u2026",
"upload_progress": "Lädt hoch...", "upload_progress": "Lädt hoch\u2026",
"upload_error": "Fehler beim Hochladen", "upload_error": "Fehler beim Hochladen",
"upload_error_quota": "Maximaler Speicherplatz erreicht", "upload_error_quota": "Maximaler Speicherplatz erreicht",
"quick_upload": "Schnelles Hochladen", "quick_upload": "Schnelles Hochladen",
"drop_zone": "Drag & Drop Dateien hierherziehen, um diese hochzuladen", "drop_zone": "Legen Sie Dateien hier ab, um sie hochzuladen",
"add_files": "Dateien hinzufügen", "add_files": "Dateien hinzufügen",
"add_new_document": "Neues Dokument hinzufügen", "add_new_document": "Neues Dokument hinzufügen",
"latest_activity": "Letzte Aktivitäten", "latest_activity": "Letzte Aktivitäten",
"footer_sismics": "Programmiert mit <span class=\"fas fa-heart\"></span> von <a href=\"https://www.sismics.com\" target=\"_blank\">Sismics</a>", "footer_sismics": "Programmiert mit <span class=\"fas fa-heart\"></span> von <a href=\"https://www.sismics.com\" target=\"_blank\">Sismics</a>",
"api_documentation": "API Dokumentation", "api_documentation": "API-Dokumentation",
"feedback": "Geben Sie uns Ihr Feedback", "feedback": "Geben Sie uns Ihr Feedback",
"workflow_document_list": "Mir zugeordnete Dokumente", "workflow_document_list": "Mir zugeordnete Dokumente",
"select_all": "Alle auswählen", "select_all": "Alle auswählen",
@@ -186,8 +186,8 @@
}, },
"pdf": { "pdf": {
"export_title": "Export als PDF", "export_title": "Export als PDF",
"export_metadata": "Export Metadaten", "export_metadata": "Metadaten exportieren",
"export_comments": "Export Kommentare", "export_comments": "Kommentare exportieren",
"fit_to_page": "Bild an Seite anpassen", "fit_to_page": "Bild an Seite anpassen",
"margin": "Rand", "margin": "Rand",
"millimeter": "mm" "millimeter": "mm"
@@ -200,17 +200,17 @@
}, },
"file": { "file": {
"view": { "view": {
"previous": "Vorheriges", "previous": "Vorherige",
"next": "Nächstes", "next": "Nächste",
"not_found": "Datei nicht gefunden" "not_found": "Datei nicht gefunden"
}, },
"edit": { "edit": {
"title": "Datei bearbeiten", "title": "Datei bearbeiten",
"name": "Dateinamen" "name": "Dateiname"
}, },
"versions": { "versions": {
"title": "Versionshistorie", "title": "Versionshistorie",
"filename": "Datiename", "filename": "Dateiname",
"mimetype": "Typ", "mimetype": "Typ",
"create_date": "Erstellungsdatum", "create_date": "Erstellungsdatum",
"version": "Version" "version": "Version"
@@ -223,17 +223,17 @@
"title": "Tags", "title": "Tags",
"message_1": "<strong>Tags</strong> sind Kategorien, die den Dokumenten zugeordnet sind.", "message_1": "<strong>Tags</strong> sind Kategorien, die den Dokumenten zugeordnet sind.",
"message_2": "Ein Dokument kann mit mehreren Tags versehen werden und ein Tag kann auf mehrere Dokumente angewendet werden.", "message_2": "Ein Dokument kann mit mehreren Tags versehen werden und ein Tag kann auf mehrere Dokumente angewendet werden.",
"message_3": "Unter Verwendung der <span class=\"glyphicon glyphicon-pencil\"></span> Schaltfläche können Sie die Berechtigungen für ein Tag bearbeiten.", "message_3": "Mit der <span class=\"glyphicon glyphicon-pencil\"></span>-Schaltfläche können Sie die Berechtigungen für ein Tag bearbeiten.",
"message_4": "Wenn ein Tag von einem anderen Benutzer oder einer anderen Gruppe gelesen werden kann, können die zugehörigen Dokumente auch von diesen Personen gelesen werden.", "message_4": "Wenn ein Tag von einem anderen Benutzer oder einer anderen Gruppe gelesen werden kann, können die zugehörigen Dokumente auch von diesen Personen gelesen werden.",
"message_5": "Kennzeichnen Sie z.B. Ihre Firmendokumente mit einem Tag <span class=\"label label-info\">MyCompany</span> und fügen Sie die Berechtigung <strong>Can read</strong> zu einer Gruppe hinzu <span class=\"btn btn-default\">employees</span>" "message_5": "Kennzeichnen Sie z.\u00A0B. Ihre Firmendokumente mit einem Tag <span class=\"label label-info\">MyCompany</span> und fügen Sie die Berechtigung <strong>Kann lesen</strong> zu einer Gruppe <span class=\"btn btn-default\">Mitarbeiter</span> hinzu"
}, },
"edit": { "edit": {
"delete_tag_title": "Tag löschen", "delete_tag_title": "Tag löschen",
"delete_tag_message": "Wollen Sie diesen Tag wirklich löschen?", "delete_tag_message": "Wollen Sie dieses Tag wirklich löschen?",
"name": "Name", "name": "Name",
"color": "Farbe", "color": "Farbe",
"parent": "Übergeordnet", "parent": "Übergeordnet",
"info": "Berechtigungen für dieses Tag werden auch auf Dokumente angewendet, die mit einem Tag versehen sind <span class=\"label label-info\" ng-style=\"{ 'background': color }\">{{ name }}</span>", "info": "Berechtigungen für dieses Tag werden auch auf Dokumente angewendet, die mit einem Tag <span class=\"label label-info\" ng-style=\"{ 'background': color }\">{{ name }}</span> versehen sind",
"circular_reference_title": "Zirkuläre Referenz", "circular_reference_title": "Zirkuläre Referenz",
"circular_reference_message": "Die Hierarchie der übergeordneten Tags bildet eine Schleife. Bitte wählen Sie ein anderes übergeordnetes Tag." "circular_reference_message": "Die Hierarchie der übergeordneten Tags bildet eine Schleife. Bitte wählen Sie ein anderes übergeordnetes Tag."
} }
@@ -250,7 +250,7 @@
"profile": { "profile": {
"groups": "Gruppen", "groups": "Gruppen",
"quota_used": "Benutzter Speicherplatz", "quota_used": "Benutzter Speicherplatz",
"percent_used": "{{ percent | number: 0 }}% genutzt", "percent_used": "{{ percent | number: 0 }}\u00A0% genutzt",
"related_links": "Weiterführende Links", "related_links": "Weiterführende Links",
"document_created": "Dokumente erstellt von {{ username }}", "document_created": "Dokumente erstellt von {{ username }}",
"edit_user": "Benutzer {{ username }} bearbeiten" "edit_user": "Benutzer {{ username }} bearbeiten"
@@ -258,8 +258,8 @@
}, },
"usergroup": { "usergroup": {
"search_groups": "In Gruppen suchen", "search_groups": "In Gruppen suchen",
"search_users": "In Benutzer suchen", "search_users": "In Benutzern suchen",
"you": "Eigenes Benutzerkonto!", "you": "Das sind Sie!",
"default": { "default": {
"title": "Benutzer und Gruppen", "title": "Benutzer und Gruppen",
"message": "Hier können Sie Informationen über Benutzer und Gruppen einsehen." "message": "Hier können Sie Informationen über Benutzer und Gruppen einsehen."
@@ -270,8 +270,8 @@
"menu_user_account": "Benutzerkonto", "menu_user_account": "Benutzerkonto",
"menu_two_factor_auth": "Zwei-Faktor-Authentifizierung", "menu_two_factor_auth": "Zwei-Faktor-Authentifizierung",
"menu_opened_sessions": "Geöffnete Sitzungen", "menu_opened_sessions": "Geöffnete Sitzungen",
"menu_file_importer": "Massen Datei Importer", "menu_file_importer": "Massen-Datei-Importer",
"menu_general_settings": "Generelle Einstellungen", "menu_general_settings": "Allgemeine Einstellungen",
"menu_workflow": "Workflows", "menu_workflow": "Workflows",
"menu_users": "Benutzerverwaltung", "menu_users": "Benutzerverwaltung",
"menu_groups": "Gruppenverwaltung", "menu_groups": "Gruppenverwaltung",
@@ -293,10 +293,10 @@
"user_used_message": "Dieser Benutzer wird im Workflow \"{{ name }}\" benutzt", "user_used_message": "Dieser Benutzer wird im Workflow \"{{ name }}\" benutzt",
"edit_user_failed_title": "Dieser Benutzer existiert bereits", "edit_user_failed_title": "Dieser Benutzer existiert bereits",
"edit_user_failed_message": "Dieser Benutzername wurde bereits von einem anderen Benutzer gewählt", "edit_user_failed_message": "Dieser Benutzername wurde bereits von einem anderen Benutzer gewählt",
"edit_user_title": "Bearbeiten \"{{ username }}\"", "edit_user_title": "Benutzer \"{{ username }}\" bearbeiten",
"add_user_title": "Neuen Benutzer hinzufügen", "add_user_title": "Neuen Benutzer hinzufügen",
"username": "Benutzername", "username": "Benutzername",
"email": "E-mail", "email": "E-Mail",
"groups": "Gruppen", "groups": "Gruppen",
"storage_quota": "Speicherkontingent", "storage_quota": "Speicherkontingent",
"storage_quota_placeholder": "Speicherkontingent (in MB)", "storage_quota_placeholder": "Speicherkontingent (in MB)",
@@ -304,8 +304,8 @@
"password_confirm": "Passwort (bestätigen)", "password_confirm": "Passwort (bestätigen)",
"disabled": "Deaktivierter Benutzer", "disabled": "Deaktivierter Benutzer",
"password_reset_btn": "Senden Sie eine E-Mail zum Zurücksetzen des Kennworts an diesen Benutzer", "password_reset_btn": "Senden Sie eine E-Mail zum Zurücksetzen des Kennworts an diesen Benutzer",
"password_lost_sent_title": "Passwort zurücksetzen Email gesendet", "password_lost_sent_title": "Passwort-zurücksetzen-E-Mail gesendet",
"password_lost_sent_message": "Passwort zurücksetzen Email an <strong>{{ username }}</strong> gesendet.", "password_lost_sent_message": "Passwort-zurücksetzen-E-Mail an <strong>{{ username }}</strong> gesendet.",
"disable_totp_btn": "Zwei-Faktor-Authentifizierung für diesen Benutzer deaktivieren", "disable_totp_btn": "Zwei-Faktor-Authentifizierung für diesen Benutzer deaktivieren",
"disable_totp_title": "Zwei-Faktor-Authentifizierung deaktivieren", "disable_totp_title": "Zwei-Faktor-Authentifizierung deaktivieren",
"disable_totp_message": "Sind Sie sicher, dass sie die Zwei-Faktor-Authentifizierung für den Benutzer deaktivieren möchten?" "disable_totp_message": "Sind Sie sicher, dass sie die Zwei-Faktor-Authentifizierung für den Benutzer deaktivieren möchten?"
@@ -319,7 +319,7 @@
"edit": { "edit": {
"delete_workflow_title": "Workflow löschen", "delete_workflow_title": "Workflow löschen",
"delete_workflow_message": "Möchten Sie diesen Workflow wirklich löschen? Derzeit ausgeführte Workflows werden nicht gelöscht", "delete_workflow_message": "Möchten Sie diesen Workflow wirklich löschen? Derzeit ausgeführte Workflows werden nicht gelöscht",
"edit_workflow_title": "Bearbeiten \"{{ name }}\"", "edit_workflow_title": "Workflow \"{{ name }}\" bearbeiten",
"add_workflow_title": "Neuen Workflow hinzufügen", "add_workflow_title": "Neuen Workflow hinzufügen",
"name": "Name", "name": "Name",
"name_placeholder": "Name des Bearbeitungschritts oder der Beschreibung", "name_placeholder": "Name des Bearbeitungschritts oder der Beschreibung",
@@ -328,8 +328,8 @@
"type_approve": "Genehmigen", "type_approve": "Genehmigen",
"type_validate": "Bestätigen", "type_validate": "Bestätigen",
"target": "Zugewiesen an", "target": "Zugewiesen an",
"target_help": "<strong>Zulassen:</strong> Überprüfen und fortsetzen des Workflows <br/><strong>Genehmigen:</strong> Übernehmen oder lehnen Sie die Überprüfung ab", "target_help": "<strong>Zulassen:</strong> Überprüfen und fortsetzen des Workflows<br/><strong>Genehmigen:</strong> Übernehmen oder lehnen Sie die Überprüfung ab",
"add_step": "Workflow Schritt hinzufügen", "add_step": "Workflow-Schritt hinzufügen",
"actions": "Was passiert danach?", "actions": "Was passiert danach?",
"remove_action": "Aktion entfernen", "remove_action": "Aktion entfernen",
"acl_info": "Nur hier definierte Benutzer und Gruppen können diesen Workflow für ein Dokument starten" "acl_info": "Nur hier definierte Benutzer und Gruppen können diesen Workflow für ein Dokument starten"
@@ -339,18 +339,18 @@
"enable_totp": "Zwei-Faktor-Authentifizierung aktivieren", "enable_totp": "Zwei-Faktor-Authentifizierung aktivieren",
"enable_totp_message": "Stellen Sie sicher, dass Sie eine TOTP-kompatible Anwendung auf Ihrem Telefon haben, die bereit ist, ein neues Konto hinzuzufügen.", "enable_totp_message": "Stellen Sie sicher, dass Sie eine TOTP-kompatible Anwendung auf Ihrem Telefon haben, die bereit ist, ein neues Konto hinzuzufügen.",
"title": "Zwei-Faktor-Authentifizierung", "title": "Zwei-Faktor-Authentifizierung",
"message_1": "Die Zwei-Faktor-Authentifizierung ermöglicht Ihnen eine weitere Absicherung Ihres {{ appName }} Benutzerkontos. Bevor Sie diese Funktion aktivieren, stellen Sie sicher, dass Sie eine TOTP-kompatible Anwendung auf Ihrem Telefon haben:", "message_1": "Die Zwei-Faktor-Authentifizierung ermöglicht Ihnen eine weitere Absicherung Ihres {{ appName }}-Benutzerkontos. Bevor Sie diese Funktion aktivieren, stellen Sie sicher, dass Sie eine TOTP-kompatible Anwendung auf Ihrem Telefon haben:",
"message_google_authenticator": "Für Android, iOS, und Blackberry: <a href=\"https://support.google.com/accounts/answer/1066447\" target=\"_blank\">Google Authenticator</a>", "message_google_authenticator": "Für Android, iOS, und Blackberry: <a href=\"https://support.google.com/accounts/answer/1066447\" target=\"_blank\">Google Authenticator</a>",
"message_duo_mobile": "Für Android und iOS: <a href=\"https://guide.duo.com/third-party-accounts\" target=\"_blank\">Duo Mobile</a>", "message_duo_mobile": "Für Android und iOS: <a href=\"https://guide.duo.com/third-party-accounts\" target=\"_blank\">Duo Mobile</a>",
"message_authenticator": "Für Windows Phone: <a href=\"https://www.microsoft.com/en-US/store/apps/Authenticator/9WZDNCRFJ3RJ\" target=\"_blank\">Authenticator</a>", "message_authenticator": "Für Windows Phone: <a href=\"https://www.microsoft.com/en-US/store/apps/Authenticator/9WZDNCRFJ3RJ\" target=\"_blank\">Authenticator</a>",
"message_2": "Diese Anwendungen generieren automatisch einen Validierungscode, der sich nach einer gewissen Zeitspanne ändert. Sie müssen diesen Validierungscode jedes Mal eingeben, wenn Sie sich bei {{ appName }} anmelden. </strong>.", "message_2": "Diese Anwendungen generieren automatisch einen Validierungscode, der sich nach einer gewissen Zeitspanne ändert. Sie müssen diesen Validierungscode jedes Mal eingeben, wenn Sie sich bei {{ appName }} anmelden</strong>.",
"secret_key": "Ihr geheimer Schlüssel lautet: <strong>{{ secret }}</strong>", "secret_key": "Ihr geheimer Schlüssel lautet: <strong>{{ secret }}</strong>",
"secret_key_warning": "Konfigurieren Sie Ihre TOTP-App jetzt mit diesem geheimen Schlüssel auf Ihrem Telefon. Sie können später nicht mehr darauf zugreifen.", "secret_key_warning": "Konfigurieren Sie Ihre TOTP-App jetzt mit diesem geheimen Schlüssel auf Ihrem Telefon. Sie können später nicht mehr auf diesen Schlüssel zugreifen.",
"totp_enabled_message": "Die Zwei-Faktor-Authentifizierung ist in Ihrem Konto aktiviert.<br/>Bei jeder Anmeldung auf <strong>{{ appName }}</strong>, werden Sie in Ihrer konfigurierten Telefon-App nach einem Bestätigungscode gefragt.<br/>Wenn Sie Ihr Telefon verlieren, können Sie sich nicht in Ihrem Konto anmelden, aber aktive Sitzungen ermöglichen es Ihnen, einen geheimen Schlüssel neu zu generieren.", "totp_enabled_message": "Die Zwei-Faktor-Authentifizierung ist in Ihrem Konto aktiviert.<br/>Bei jeder Anmeldung auf <strong>{{ appName }}</strong> werden Sie in Ihrer konfigurierten Telefon-App nach einem Bestätigungscode gefragt.<br/>Wenn Sie Ihr Telefon verlieren, können Sie sich nicht in Ihrem Konto anmelden, aber aktive Sitzungen ermöglichen es Ihnen, einen geheimen Schlüssel neu zu generieren.",
"disable_totp": { "disable_totp": {
"disable_totp": "Deaktivieren der Zwei-Faktor-Authentifizierung", "disable_totp": "Deaktivieren der Zwei-Faktor-Authentifizierung",
"message": "Ihr Konto wird nicht mehr durch die Zwei-Faktor-Authentifizierung geschützt.", "message": "Ihr Konto wird nicht mehr durch die Zwei-Faktor-Authentifizierung geschützt.",
"confirm_password": "Bestätigen Sie ihr Passwort", "confirm_password": "Bestätigen Sie Ihr Passwort",
"submit": "Deaktivieren der Zwei-Faktor-Authentifizierung" "submit": "Deaktivieren der Zwei-Faktor-Authentifizierung"
}, },
"test_totp": "Bitte geben Sie den auf Ihrem Telefon angezeigten Validierungscode ein:", "test_totp": "Bitte geben Sie den auf Ihrem Telefon angezeigten Validierungscode ein:",
@@ -365,13 +365,13 @@
"delete_group_title": "Gruppe löschen", "delete_group_title": "Gruppe löschen",
"delete_group_message": "Wollen Sie diese Gruppe wirklich löschen?", "delete_group_message": "Wollen Sie diese Gruppe wirklich löschen?",
"edit_group_failed_title": "Gruppe existiert bereits", "edit_group_failed_title": "Gruppe existiert bereits",
"edit_group_failed_message": "Dieser Gruppenname wird bereits von einer anderen Gruppe übernommen", "edit_group_failed_message": "Dieser Gruppenname wird bereits von einer anderen Gruppe verwendet",
"group_used_title": "Gruppe in Verwendung", "group_used_title": "Gruppe in Verwendung",
"group_used_message": "Diese Gruppe wird im Workflow \"{{ name }}\" verwendet", "group_used_message": "Diese Gruppe wird im Workflow \"{{ name }}\" verwendet",
"edit_group_title": "Bearbeiten \"{{ name }}\"", "edit_group_title": "Gruppe \"{{ name }}\" bearbeiten",
"add_group_title": "Neue Gruppe hinzufügen", "add_group_title": "Neue Gruppe hinzufügen",
"name": "Name", "name": "Name",
"parent_group": "Übergruppe", "parent_group": "Übergeordnete Gruppe",
"search_group": "Gruppe suchen", "search_group": "Gruppe suchen",
"members": "Mitglieder", "members": "Mitglieder",
"new_member": "Neue Mitglieder", "new_member": "Neue Mitglieder",
@@ -386,7 +386,7 @@
}, },
"config": { "config": {
"title_guest_access": "Gastzugang", "title_guest_access": "Gastzugang",
"message_guest_access": "Der Gastzugang ist ein Modus, in dem jeder auf {{appName}} ohne Kennwort zugreifen kann. <br/> Wie ein normaler Benutzer kann der Gastbenutzer nur auf seine Dokumente und diejenigen zugreifen, auf die er über Berechtigungen zugreifen kann.<br/>", "message_guest_access": "Der Gastzugang ist ein Modus, in dem jeder auf {{appName}} ohne Kennwort zugreifen kann.<br/>Wie ein normaler Benutzer kann der Gastbenutzer nur auf seine Dokumente und diejenigen zugreifen, auf die er über Berechtigungen zugreifen kann.<br/>",
"enable_guest_access": "Gastzugang aktivieren", "enable_guest_access": "Gastzugang aktivieren",
"disable_guest_access": "Gastzugang deaktivieren", "disable_guest_access": "Gastzugang deaktivieren",
"title_theme": "Aussehen anpassen", "title_theme": "Aussehen anpassen",
@@ -399,13 +399,13 @@
"logo": "Logo (quadratische Größe)", "logo": "Logo (quadratische Größe)",
"background_image": "Hintergrundbild", "background_image": "Hintergrundbild",
"uploading_image": "Bild hochladen...", "uploading_image": "Bild hochladen...",
"title_smtp": "SMTP Email Einstellungen <small>für das Zürucksetzen des Passworts</small>", "title_smtp": "SMTP-E-Mail-Einstellungen <small>für das Zurücksetzen des Passworts</small>",
"smtp_hostname": "SMTP Server", "smtp_hostname": "SMTP-Server",
"smtp_port": "SMTP Port", "smtp_port": "SMTP-Port",
"smtp_from": "Absender E-Mail", "smtp_from": "Absender-E-Mail",
"smtp_username": "SMTP Benutzername", "smtp_username": "SMTP-Benutzername",
"smtp_password": "SMTP Passwort", "smtp_password": "SMTP-Passwort",
"smtp_updated": "SMTP Konfiguration erfolgreich aktualisiert", "smtp_updated": "SMTP-Konfiguration erfolgreich aktualisiert",
"webhooks": "Webhooks", "webhooks": "Webhooks",
"webhooks_explain": "Webhooks werden aufgerufen, wenn das angegebene Ereignis eintritt. Die angegebene URL wird mit einer JSON-Payload gepostet, die den Ereignisnamen und die ID der betreffenden Ressource enthält.", "webhooks_explain": "Webhooks werden aufgerufen, wenn das angegebene Ereignis eintritt. Die angegebene URL wird mit einer JSON-Payload gepostet, die den Ereignisnamen und die ID der betreffenden Ressource enthält.",
"webhook_event": "Ereignisse", "webhook_event": "Ereignisse",
@@ -416,35 +416,35 @@
"metadata": { "metadata": {
"title": "Konfiguration benutzerdefinierter Metadaten", "title": "Konfiguration benutzerdefinierter Metadaten",
"message": "Hier können Sie Ihren Dokumenten benutzerdefinierte Metadaten wie eine interne Kennung oder ein Ablaufdatum hinzufügen. Bitte beachten Sie, dass der Metadatentyp nach der Erstellung nicht mehr geändert werden kann.", "message": "Hier können Sie Ihren Dokumenten benutzerdefinierte Metadaten wie eine interne Kennung oder ein Ablaufdatum hinzufügen. Bitte beachten Sie, dass der Metadatentyp nach der Erstellung nicht mehr geändert werden kann.",
"name": "Metadatensatz Name", "name": "Metadatensatz-Name",
"type": "Metadatensatz Typ" "type": "Metadatensatz-Typ"
}, },
"inbox": { "inbox": {
"title": "Posteingang durchsuchen", "title": "Posteingang durchsuchen",
"message": "Wenn Sie diese Funktion aktivieren, durchsucht das System den angegebenen Posteingang jede Minute nach <strong>ungelesenen</strong> E-Mails und importiert diese automatisch.<br/>Nach dem Import einer E-Mail wird diese als gelesen markiert.<br/>Folgen Sie den Links zu Konfigurationseinstellungen für <a href=\"https://support.google.com/mail/answer/7126229?hl=en\" target=\"_blank\">Gmail</a>, <a href=\"https://support.office.com/en-us/article/pop-imap-and-smtp-settings-for-outlook-com-d088b986-291d-42b8-9564-9c414e2aa040\" target=\"_blank\">Outlook.com</a>, <a href=\"https://help.yahoo.com/kb/SLN4075.html\" target=\"_blank\">Yahoo</a>.", "message": "Wenn Sie diese Funktion aktivieren, durchsucht das System den angegebenen Posteingang jede Minute nach <strong>ungelesenen</strong> E-Mails und importiert diese automatisch.<br/>Nach dem Import einer E-Mail wird diese als gelesen markiert.<br/>Folgen Sie den Links zu Konfigurationseinstellungen für <a href=\"https://support.google.com/mail/answer/7126229?hl=en\" target=\"_blank\">Gmail</a>, <a href=\"https://support.office.com/en-us/article/pop-imap-and-smtp-settings-for-outlook-com-d088b986-291d-42b8-9564-9c414e2aa040\" target=\"_blank\">Outlook.com</a>, <a href=\"https://help.yahoo.com/kb/SLN4075.html\" target=\"_blank\">Yahoo</a>.",
"enabled": "Durchsuchen des Posteingangs aktivieren", "enabled": "Durchsuchen des Posteingangs aktivieren",
"hostname": "IMAP Server", "hostname": "IMAP-Server",
"port": "IMAP Port (143 oder 993)", "port": "IMAP-Port (143 oder 993)",
"username": "IMAP Benutzername", "username": "IMAP-Benutzername",
"password": "IMAP Passwort", "password": "IMAP-Passwort",
"folder": "IMAP Ordner", "folder": "IMAP-Ordner",
"tag": "Folgenden Tag zu importierten Dokumenten hinzufügen", "tag": "Folgendes Tag zu importierten Dokumenten hinzufügen",
"test": "Konfiguration testen", "test": "Konfiguration testen",
"last_sync": "Letzte Synchronisation: {{ data.date | date: 'medium' }}, {{ data.count }} E-Mail(s){{ data.count > 1 ? 's' : '' }} importiert", "last_sync": "Letzte Synchronisation: {{ data.date | date: 'medium' }}, {{ data.count }} {{ data.count > 1 ? 'E-Mails' : 'E-Mail' }} importiert",
"test_success": "Die Verbindung zum Posteingang war erfolgreich ({{ count }} <strong>unread</strong> message{{ count > 1 ? 's' : '' }})", "test_success": "Die Verbindung zum Posteingang war erfolgreich ({{ count }} <strong>ungelesene</strong> {{ count > 1 ? 'Nachrichten' : 'Nachricht' }})",
"test_fail": "Beim Verbinden mit dem Posteingang ist ein Fehler aufgetreten, bitte überprüfen Sie die Einstellungen", "test_fail": "Beim Verbinden mit dem Posteingang ist ein Fehler aufgetreten, bitte überprüfen Sie die Einstellungen",
"saved": "IMAP Konfiguration erfolgreich gespeichert" "saved": "IMAP-Konfiguration erfolgreich gespeichert"
}, },
"monitoring": { "monitoring": {
"background_tasks": "Hintergrundaufgaben", "background_tasks": "Hintergrundaufgaben",
"queued_tasks": "Es gibt derzeit {{ count }} anstehende Tasks.", "queued_tasks": "Es gibt derzeit {{ count }} {{ count > 1 ? 'anstehende Aufgaben' : 'anstehende Aufgabe' }}.",
"queued_tasks_explain": "Dateiverarbeitung, Thumbnail-Erstellung, Index-Update, optische Zeichenerkennung sind Hintergrundaufgaben. Eine große Anzahl unbearbeiteter Aufgaben führt zu unvollständigen Suchergebnissen.", "queued_tasks_explain": "Dateiverarbeitung, Thumbnail-Erstellung, Index-Update, optische Zeichenerkennung sind Hintergrundaufgaben. Eine große Anzahl unbearbeiteter Aufgaben führt zu unvollständigen Suchergebnissen.",
"server_logs": "Server Logs", "server_logs": "Server-Logs",
"log_date": "Datum", "log_date": "Datum",
"log_tag": "Tag", "log_tag": "Tag",
"log_message": "Nachricht", "log_message": "Nachricht",
"indexing": "Indexierung", "indexing": "Indexierung",
"indexing_info": "Wenn Sie Unstimmigkeiten in den Suchergebnissen feststellen, können Sie versuchen, eine vollständige Neuindizierung durchzuführen. Die Suchergebnisse sind bis zum Abschluss dieser Operation unvollständig.", "indexing_info": "Wenn Sie Unstimmigkeiten in den Suchergebnissen feststellen, können Sie versuchen, eine vollständige Neuindizierung durchzuführen. Die Suchergebnisse sind bis zum Abschluss dieser Aufgabe unvollständig.",
"start_reindexing": "Vollständige Neuindizierung starten", "start_reindexing": "Vollständige Neuindizierung starten",
"reindexing_started": "Neuindizierung wurde gestartet, bitte warten Sie, bis es keine Hintergrundaufgaben mehr gibt." "reindexing_started": "Neuindizierung wurde gestartet, bitte warten Sie, bis es keine Hintergrundaufgaben mehr gibt."
}, },
@@ -469,14 +469,14 @@
"new_entry": "Neuer Eintrag" "new_entry": "Neuer Eintrag"
}, },
"fileimporter": { "fileimporter": {
"title": "Massen Datei Importer", "title": "Massen-Datei-Importer",
"advanced_users": "Für fortgeschrittene Benutzer!", "advanced_users": "Für fortgeschrittene Benutzer!",
"need_intro": "Wenn Sie:", "need_intro": "Wenn Sie:",
"need_1": "Ganze Verzeichnisse von Dateien auf einmal importieren möchten", "need_1": "Ganze Verzeichnisse von Dateien auf einmal importieren möchten",
"need_2": "Ein Verzeichnis nach neuen Dateien durchsuchen lassen und gefunden Dateien importieren lassen möchten", "need_2": "Ein Verzeichnis nach neuen Dateien durchsuchen lassen und gefunden Dateien importieren lassen möchten",
"line_1": "Gehen Sie zu <a href=\"https://github.com/sismics/docs/releases\">sismics/docs/releases</a> und laden Sie das Datei-Importer-Tool für Ihr System herunter.", "line_1": "Gehen Sie zu <a href=\"https://github.com/sismics/docs/releases\">sismics/docs/releases</a> und laden Sie das Datei-Importer-Tool für Ihr System herunter.",
"line_2": "Folgen Sie den <a href=\"https://github.com/sismics/docs/tree/master/docs-importer\">Anweisungen</a>, um das Import-Tool zu nutzen.", "line_2": "Folgen Sie den <a href=\"https://github.com/sismics/docs/tree/master/docs-importer\">Anweisungen</a>, um das Import-Tool zu nutzen.",
"line_3": "Ihre Dateien werden in <a href=\"#/document\">Modus 'Schnelles Hochladen'</a> importiert. Danach können Sie die Dateien weiterbearbeiten und Dokumenten zuordnen oder Dokumente erstellen.", "line_3": "Ihre Dateien werden im <a href=\"#/document\">Modus 'Schnelles Hochladen'</a> importiert. Danach können Sie die Dateien weiterbearbeiten und Dokumenten zuordnen oder Dokumente erstellen.",
"download": "Herunterladen", "download": "Herunterladen",
"instructions": "Anweisungen" "instructions": "Anweisungen"
} }
@@ -522,14 +522,14 @@
"Webhook": "Webhook" "Webhook": "Webhook"
}, },
"selectrelation": { "selectrelation": {
"typeahead": "Tippen Sie einen Dokumentnamen ein" "typeahead": "Geben Sie einen Dokumentnamen ein"
}, },
"selecttag": { "selecttag": {
"typeahead": "Tippen Sie einen Tagnamen ein" "typeahead": "Geben Sie einen Tagnamen ein"
}, },
"datepicker": { "datepicker": {
"current": "Heute", "current": "Heute",
"clear": "Bereinigen", "clear": "Leeren",
"close": "Erledigt" "close": "Erledigt"
} }
}, },
@@ -579,7 +579,7 @@
"onboarding": { "onboarding": {
"step1": { "step1": {
"title": "Das erste Mal?", "title": "Das erste Mal?",
"description": "Wenn Sie Teedy zum ersten Mal nutzen, klicken Sie auf die Schaltfläche Weiter. Andernfalls können Sie mich schließen." "description": "Wenn Sie Teedy zum ersten Mal nutzen, klicken Sie auf die Schaltfläche \"Weiter\". Andernfalls können Sie diese Box schließen."
}, },
"step2": { "step2": {
"title": "Dokumente", "title": "Dokumente",
@@ -604,7 +604,7 @@
"cancel": "Abbrechen", "cancel": "Abbrechen",
"share": "Teilen", "share": "Teilen",
"unshare": "Nicht mehr teilen", "unshare": "Nicht mehr teilen",
"close": "Schliessen", "close": "Schließen",
"add": "Hinzufügen", "add": "Hinzufügen",
"open": "Öffnen", "open": "Öffnen",
"see": "Ansehen", "see": "Ansehen",
@@ -614,7 +614,7 @@
"delete": "Löschen", "delete": "Löschen",
"rename": "Umbenennen", "rename": "Umbenennen",
"download": "Herunterladen", "download": "Herunterladen",
"loading": "Lädt...", "loading": "Lädt\u2026",
"send": "Absenden", "send": "Absenden",
"enabled": "Aktiviert", "enabled": "Aktiviert",
"disabled": "Deaktiviert" "disabled": "Deaktiviert"

View File

@@ -25,7 +25,7 @@
"message": "Please enter a new password", "message": "Please enter a new password",
"submit": "Change my password", "submit": "Change my password",
"error_title": "Error changing your password", "error_title": "Error changing your password",
"error_message": "Your password recovery request is expired, please ask a new one on the login page" "error_message": "Your password recovery request is expired, please ask for a new one on the login page"
}, },
"index": { "index": {
"toggle_navigation": "Toggle navigation", "toggle_navigation": "Toggle navigation",
@@ -360,7 +360,7 @@
"message_2": "Those applications automatically generate a validation code that changes after a certain period of time.<br/>You will be required to enter this validation code each time you login on <strong>{{ appName }}</strong>.", "message_2": "Those applications automatically generate a validation code that changes after a certain period of time.<br/>You will be required to enter this validation code each time you login on <strong>{{ appName }}</strong>.",
"secret_key": "Your secret key is: <strong>{{ secret }}</strong>", "secret_key": "Your secret key is: <strong>{{ secret }}</strong>",
"secret_key_warning": "Configure your TOTP app on your phone with this secret key now, you will not be able to access it later.", "secret_key_warning": "Configure your TOTP app on your phone with this secret key now, you will not be able to access it later.",
"totp_enabled_message": "Two-factor authentication is enabled on your account.<br/>Each time you login on <strong>{{ appName }}</strong>, you will be asked a validation code from your configured phone app.<br/>If you lose your phone, you will not be able to login into your account but active sessions will allow you to regenerate a secrey key.", "totp_enabled_message": "Two-factor authentication is enabled on your account.<br/>Each time you login on <strong>{{ appName }}</strong>, you will be asked for a validation code from your configured phone app.<br/>If you lose your phone, you will not be able to login into your account but active sessions will allow you to regenerate a secrey key.",
"disable_totp": { "disable_totp": {
"disable_totp": "Disable two-factor authentication", "disable_totp": "Disable two-factor authentication",
"message": "Your account will not be protected by the two-factor authentication anymore.", "message": "Your account will not be protected by the two-factor authentication anymore.",
@@ -509,7 +509,7 @@
"error_general": "An error occurred while trying to import your file, please make sure it is a valid EML file" "error_general": "An error occurred while trying to import your file, please make sure it is a valid EML file"
}, },
"app_share": { "app_share": {
"main": "Ask a shared document link to access it", "main": "Ask for a shared document link to access it",
"403": { "403": {
"title": "Not authorized", "title": "Not authorized",
"message": "The document you are trying to view is not shared anymore" "message": "The document you are trying to view is not shared anymore"

View File

@@ -41,8 +41,8 @@
img-error="error = true" img-error="error = true"
ng-show="!error && canDisplayPreview()" /> ng-show="!error && canDisplayPreview()" />
<!-- Video player --> <!-- Media player -->
<a href class="video-overlay" ng-if="!error && file.mimetype.substring(0, 6) == 'video/'" <a href class="video-overlay" ng-if="!error && (file.mimetype.substring(0, 6) == 'video/' || file.mimetype.substring(0, 6) == 'audio/')"
ng-init="videoPlayer = false" ng-click="videoPlayer = true"> ng-init="videoPlayer = false" ng-click="videoPlayer = true">
<span class="fas fa-play-circle" ng-if="!videoPlayer"></span> <span class="fas fa-play-circle" ng-if="!videoPlayer"></span>
<video ng-if="videoPlayer" autoplay="autoplay" loop="loop" <video ng-if="videoPlayer" autoplay="autoplay" loop="loop"

View File

@@ -9,7 +9,7 @@
<label class="col-sm-2 control-label" for="inputUserUsername">{{ 'settings.user.edit.username' | translate }}</label> <label class="col-sm-2 control-label" for="inputUserUsername">{{ 'settings.user.edit.username' | translate }}</label>
<div class="col-sm-7"> <div class="col-sm-7">
<input name="userUsername" type="text" id="inputUserUsername" required ng-disabled="isEdit()" class="form-control" <input name="userUsername" type="text" id="inputUserUsername" required ng-disabled="isEdit()" class="form-control"
ng-pattern="/^[a-zA-Z0-9_]*$/" ng-pattern="/^[a-zA-Z0-9_@\.]*$/"
ng-minlength="3" ng-maxlength="50" ng-attr-placeholder="{{ 'settings.user.edit.username' | translate }}" ng-model="user.username"/> ng-minlength="3" ng-maxlength="50" ng-attr-placeholder="{{ 'settings.user.edit.username' | translate }}" ng-model="user.username"/>
</div> </div>

View File

@@ -9,3 +9,4 @@ log4j.logger.com.sismics=INFO
log4j.logger.org.apache.pdfbox=ERROR log4j.logger.org.apache.pdfbox=ERROR
log4j.logger.org.glassfish.jersey.servlet.WebComponent=ERROR log4j.logger.org.glassfish.jersey.servlet.WebComponent=ERROR
log4j.logger.org.apache.directory=ERROR log4j.logger.org.apache.directory=ERROR
log4j.logger.org.odftoolkit=ERROR

View File

@@ -1,3 +0,0 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=27

View File

@@ -1 +0,0 @@
\ugggg

View File

@@ -1,8 +0,0 @@
log4j.rootCategory=WARN, CONSOLE, MEMORY
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%d{DATE} %p %l %m %n
log4j.appender.MEMORY=com.sismics.util.log4j.MemoryAppender
log4j.appender.MEMORY.size=1000
log4j.logger.com.sismics=DEBUG

View File

@@ -99,7 +99,7 @@ public class TestAuditLogResource extends BaseJerseyTest {
long update1Date = json.getJsonNumber("update_date").longValue(); long update1Date = json.getJsonNumber("update_date").longValue();
// Add a file to the document // Add a file to the document
clientUtil.addFileToDocument("file/wikipedia.pdf", "wikipedia.pdf", auditlog1Token, document1Id); clientUtil.addFileToDocument(FILE_WIKIPEDIA_PDF, auditlog1Token, document1Id);
// Get document 1 // Get document 1
json = target().path("/document/" + document1Id).request() json = target().path("/document/" + document1Id).request()

View File

@@ -4,8 +4,6 @@ import com.google.common.io.ByteStreams;
import com.google.common.io.Resources; import com.google.common.io.Resources;
import com.sismics.docs.core.util.DirectoryUtil; import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.util.filter.TokenBasedSecurityFilter; import com.sismics.util.filter.TokenBasedSecurityFilter;
import com.sismics.util.mime.MimeType;
import com.sismics.util.mime.MimeTypeUtil;
import org.glassfish.jersey.media.multipart.FormDataMultiPart; import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature; import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart; import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
@@ -96,8 +94,8 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertNotNull(document2Id); Assert.assertNotNull(document2Id);
// Add a file // Add a file
String file1Id = clientUtil.addFileToDocument("file/Einstein-Roosevelt-letter.png", String file1Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG,
"Einstein-Roosevelt-letter.png", document1Token, document1Id); document1Token, document1Id);
// Share this document // Share this document
target().path("/share").request() target().path("/share").request()
@@ -143,7 +141,7 @@ public class TestDocumentResource extends BaseJerseyTest {
json = target().path("/document").request() json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, document3Token)
.put(Entity.form(new Form() .put(Entity.form(new Form()
.param("title", "My super title document 3") .param("title", "My_super_title_document_3")
.param("description", "My super description for document 3") .param("description", "My super description for document 3")
.param("language", "eng") .param("language", "eng")
.param("create_date", Long.toString(create3Date))), JsonObject.class); .param("create_date", Long.toString(create3Date))), JsonObject.class);
@@ -151,8 +149,8 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertNotNull(document3Id); Assert.assertNotNull(document3Id);
// Add a file // Add a file
clientUtil.addFileToDocument("file/Einstein-Roosevelt-letter.png", clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG,
"Einstein-Roosevelt-letter.png", document3Token, document3Id); document3Token, document3Id);
// List all documents from document3 // List all documents from document3
json = target().path("/document/list") json = target().path("/document/list")
@@ -217,6 +215,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(1, searchDocuments("mime:image/png", document1Token)); Assert.assertEquals(1, searchDocuments("mime:image/png", document1Token));
Assert.assertEquals(0, searchDocuments("mime:empty/void", document1Token)); Assert.assertEquals(0, searchDocuments("mime:empty/void", document1Token));
Assert.assertEquals(1, searchDocuments("after:2010 before:2040-08 tag:super shared:yes lang:eng simple:title simple:description full:uranium", document1Token)); Assert.assertEquals(1, searchDocuments("after:2010 before:2040-08 tag:super shared:yes lang:eng simple:title simple:description full:uranium", document1Token));
Assert.assertEquals(1, searchDocuments("title:My_super_title_document_3", document3Token));
// Search documents (nothing) // Search documents (nothing)
Assert.assertEquals(0, searchDocuments("random", document1Token)); Assert.assertEquals(0, searchDocuments("random", document1Token));
@@ -228,6 +227,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(0, searchDocuments("before:2040-05-38", document1Token)); Assert.assertEquals(0, searchDocuments("before:2040-05-38", document1Token));
Assert.assertEquals(0, searchDocuments("tag:Nop", document1Token)); Assert.assertEquals(0, searchDocuments("tag:Nop", document1Token));
Assert.assertEquals(0, searchDocuments("lang:fra", document1Token)); Assert.assertEquals(0, searchDocuments("lang:fra", document1Token));
Assert.assertEquals(0, searchDocuments("title:Unknown title", document3Token));
// Get document 1 // Get document 1
json = target().path("/document/" + document1Id).request() json = target().path("/document/" + document1Id).request()
@@ -262,6 +262,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(document2Id, relations.getJsonObject(0).getString("id")); Assert.assertEquals(document2Id, relations.getJsonObject(0).getString("id"));
Assert.assertFalse(relations.getJsonObject(0).getBoolean("source")); Assert.assertFalse(relations.getJsonObject(0).getBoolean("source"));
Assert.assertEquals("My super title document 2", relations.getJsonObject(0).getString("title")); Assert.assertEquals("My super title document 2", relations.getJsonObject(0).getString("title"));
Assert.assertFalse(json.containsKey("files"));
// Get document 2 // Get document 2
json = target().path("/document/" + document2Id).request() json = target().path("/document/" + document2Id).request()
@@ -273,6 +274,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(document1Id, relations.getJsonObject(0).getString("id")); Assert.assertEquals(document1Id, relations.getJsonObject(0).getString("id"));
Assert.assertTrue(relations.getJsonObject(0).getBoolean("source")); Assert.assertTrue(relations.getJsonObject(0).getBoolean("source"));
Assert.assertEquals("My super title document 1", relations.getJsonObject(0).getString("title")); Assert.assertEquals("My super title document 1", relations.getJsonObject(0).getString("title"));
Assert.assertFalse(json.containsKey("files"));
// Create a tag // Create a tag
json = target().path("/tag").request() json = target().path("/tag").request()
@@ -328,6 +330,25 @@ public class TestDocumentResource extends BaseJerseyTest {
.get(JsonObject.class); .get(JsonObject.class);
documents = json.getJsonArray("documents"); documents = json.getJsonArray("documents");
Assert.assertEquals(1, documents.size()); Assert.assertEquals(1, documents.size());
Assert.assertEquals(document1Id, documents.getJsonObject(0).getString("id"));
Assert.assertFalse(documents.getJsonObject(0).containsKey("files"));
// Search documents by query with files
json = target().path("/document/list")
.queryParam("files", true)
.queryParam("search", "new")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.get(JsonObject.class);
documents = json.getJsonArray("documents");
Assert.assertEquals(1, documents.size());
Assert.assertEquals(1, documents.size());
Assert.assertEquals(document1Id, documents.getJsonObject(0).getString("id"));
JsonArray files = documents.getJsonObject(0).getJsonArray("files");
Assert.assertEquals(1, files.size());
Assert.assertEquals(file1Id, files.getJsonObject(0).getString("id"));
Assert.assertEquals("Einstein-Roosevelt-letter.png", files.getJsonObject(0).getString("name"));
Assert.assertEquals("image/png", files.getJsonObject(0).getString("mimetype"));
// Get document 1 // Get document 1
json = target().path("/document/" + document1Id).request() json = target().path("/document/" + document1Id).request()
@@ -351,6 +372,19 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals("document1", contributors.getJsonObject(0).getString("username")); Assert.assertEquals("document1", contributors.getJsonObject(0).getString("username"));
relations = json.getJsonArray("relations"); relations = json.getJsonArray("relations");
Assert.assertEquals(0, relations.size()); Assert.assertEquals(0, relations.size());
Assert.assertFalse(json.containsKey("files"));
// Get document 1 with its files
json = target().path("/document/" + document1Id)
.queryParam("files", true)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.get(JsonObject.class);
files = json.getJsonArray("files");
Assert.assertEquals(1, files.size());
Assert.assertEquals(file1Id, files.getJsonObject(0).getString("id"));
Assert.assertEquals("Einstein-Roosevelt-letter.png", files.getJsonObject(0).getString("name"));
Assert.assertEquals("image/png", files.getJsonObject(0).getString("mimetype"));
// Get document 2 // Get document 2
json = target().path("/document/" + document1Id).request() json = target().path("/document/" + document1Id).request()
@@ -365,6 +399,12 @@ public class TestDocumentResource extends BaseJerseyTest {
.delete(JsonObject.class); .delete(JsonObject.class);
Assert.assertEquals("ok", json.getString("status")); Assert.assertEquals("ok", json.getString("status"));
// Deletes a non-existing document
response = target().path("/document/69b79238-84bb-4263-a32f-9cbdf8c92188").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.delete();
Assert.assertEquals(Status.NOT_FOUND, Status.fromStatusCode(response.getStatus()));
// Check that the associated files are deleted from FS // Check that the associated files are deleted from FS
java.io.File storedFile = DirectoryUtil.getStorageDirectory().resolve(file1Id).toFile(); java.io.File storedFile = DirectoryUtil.getStorageDirectory().resolve(file1Id).toFile();
java.io.File webFile = DirectoryUtil.getStorageDirectory().resolve(file1Id + "_web").toFile(); java.io.File webFile = DirectoryUtil.getStorageDirectory().resolve(file1Id + "_web").toFile();
@@ -408,22 +448,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentOdtToken = clientUtil.login("document_odt"); String documentOdtToken = clientUtil.login("document_odt");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(documentOdtToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentOdtToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a PDF file // Add a PDF file
String file1Id = clientUtil.addFileToDocument("file/document.odt", "document.odt", documentOdtToken, document1Id); String file1Id = clientUtil.addFileToDocument(FILE_DOCUMENT_ODT, documentOdtToken, document1Id);
// Search documents by query in full content // Search documents by query in full content
json = target().path("/document/list") JsonObject json = target().path("/document/list")
.queryParam("search", "full:ipsum") .queryParam("search", "full:ipsum")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentOdtToken) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentOdtToken)
@@ -439,7 +470,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
// Export a document in PDF format // Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf") response = target().path("/document/" + document1Id + "/pdf")
@@ -468,22 +498,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentDocxToken = clientUtil.login("document_docx"); String documentDocxToken = clientUtil.login("document_docx");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(documentDocxToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentDocxToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a PDF file // Add a PDF file
String file1Id = clientUtil.addFileToDocument("file/document.docx", "document.docx", documentDocxToken, document1Id); String file1Id = clientUtil.addFileToDocument(FILE_DOCUMENT_DOCX, documentDocxToken, document1Id);
// Search documents by query in full content // Search documents by query in full content
json = target().path("/document/list") JsonObject json = target().path("/document/list")
.queryParam("search", "full:dolor") .queryParam("search", "full:dolor")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentDocxToken) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentDocxToken)
@@ -499,7 +520,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
// Export a document in PDF format // Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf") response = target().path("/document/" + document1Id + "/pdf")
@@ -528,22 +548,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentPdfToken = clientUtil.login("document_pdf"); String documentPdfToken = clientUtil.login("document_pdf");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(documentPdfToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPdfToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a PDF file // Add a PDF file
String file1Id = clientUtil.addFileToDocument("file/wikipedia.pdf", "wikipedia.pdf", documentPdfToken, document1Id); String file1Id = clientUtil.addFileToDocument(FILE_WIKIPEDIA_PDF, documentPdfToken, document1Id);
// Search documents by query in full content // Search documents by query in full content
json = target().path("/document/list") JsonObject json = target().path("/document/list")
.queryParam("search", "full:vrandecic") .queryParam("search", "full:vrandecic")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPdfToken) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPdfToken)
@@ -559,7 +570,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
// Export a document in PDF format // Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf") response = target().path("/document/" + document1Id + "/pdf")
@@ -588,22 +598,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentPlainToken = clientUtil.login("document_plain"); String documentPlainToken = clientUtil.login("document_plain");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(documentPlainToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPlainToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a plain text file // Add a plain text file
String file1Id = clientUtil.addFileToDocument("file/document.txt", "document.txt", documentPlainToken, document1Id); String file1Id = clientUtil.addFileToDocument(FILE_DOCUMENT_TXT, documentPlainToken, document1Id);
// Search documents by query in full content // Search documents by query in full content
json = target().path("/document/list") JsonObject json = target().path("/document/list")
.queryParam("search", "full:love") .queryParam("search", "full:love")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPlainToken) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPlainToken)
@@ -619,7 +620,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
// Get the content data // Get the content data
response = target().path("/file/" + file1Id + "/data") response = target().path("/file/" + file1Id + "/data")
@@ -658,22 +658,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentVideoToken = clientUtil.login("document_video"); String documentVideoToken = clientUtil.login("document_video");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(documentVideoToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentVideoToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a video file // Add a video file
String file1Id = clientUtil.addFileToDocument("file/video.webm", "video.webm", documentVideoToken, document1Id); String file1Id = clientUtil.addFileToDocument(FILE_VIDEO_WEBM, documentVideoToken, document1Id);
// Search documents by query in full content // Search documents by query in full content
json = target().path("/document/list") JsonObject json = target().path("/document/list")
.queryParam("search", "full:vp9") .queryParam("search", "full:vp9")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentVideoToken) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentVideoToken)
@@ -689,7 +680,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
// Export a document in PDF format // Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf") response = target().path("/document/" + document1Id + "/pdf")
@@ -718,22 +708,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentPptxToken = clientUtil.login("document_pptx"); String documentPptxToken = clientUtil.login("document_pptx");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(documentPptxToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPptxToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a PPTX file // Add a PPTX file
String file1Id = clientUtil.addFileToDocument("file/apache.pptx", "apache.pptx", documentPptxToken, document1Id); String file1Id = clientUtil.addFileToDocument(FILE_APACHE_PPTX, documentPptxToken, document1Id);
// Search documents by query in full content // Search documents by query in full content
json = target().path("/document/list") JsonObject json = target().path("/document/list")
.queryParam("search", "full:scaling") .queryParam("search", "full:scaling")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPptxToken) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPptxToken)
@@ -749,7 +730,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
// Export a document in PDF format // Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf") response = target().path("/document/" + document1Id + "/pdf")

View File

@@ -23,6 +23,7 @@ import java.io.InputStream;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
import java.util.Date; import java.util.Date;
import java.util.zip.ZipInputStream;
/** /**
* Exhaustive test of the file resource. * Exhaustive test of the file resource.
@@ -37,53 +38,18 @@ public class TestFileResource extends BaseJerseyTest {
*/ */
@Test @Test
public void testFileResource() throws Exception { public void testFileResource() throws Exception {
// Login file1 // Login file_resources
clientUtil.createUser("file1"); clientUtil.createUser("file_resources");
String file1Token = clientUtil.login("file1"); String file1Token = clientUtil.login("file_resources");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(file1Token);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.put(Entity.form(new Form()
.param("title", "File test document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a file // Add a file
String file1Id; String file1Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, file1Token, document1Id);
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
Assert.assertEquals(163510L, json.getJsonNumber("size").longValue());
}
}
// Add a file // Add a file
String file2Id; String file2Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, file1Token, document1Id);
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file2Id = json.getString("id");
Assert.assertNotNull(file2Id);
}
}
// Get the file data // Get the file data
Response response = target().path("/file/" + file1Id + "/data").request() Response response = target().path("/file/" + file1Id + "/data").request()
@@ -91,7 +57,6 @@ public class TestFileResource extends BaseJerseyTest {
.get(); .get();
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0); Assert.assertTrue(fileBytes.length > 0);
// Get the thumbnail data // Get the thumbnail data
@@ -103,7 +68,6 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus())); Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity(); is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is); fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0); Assert.assertTrue(fileBytes.length > 0);
// Get the content data // Get the content data
@@ -123,7 +87,6 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus())); Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity(); is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is); fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0); Assert.assertTrue(fileBytes.length > 0);
// Check that the files are not readable directly from FS // Check that the files are not readable directly from FS
@@ -131,7 +94,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(MimeType.DEFAULT, MimeTypeUtil.guessMimeType(storedFile, null)); Assert.assertEquals(MimeType.DEFAULT, MimeTypeUtil.guessMimeType(storedFile, null));
// Get all files from a document // Get all files from a document
json = target().path("/file/list") JsonObject json = target().path("/file/list")
.queryParam("id", document1Id) .queryParam("id", document1Id)
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
@@ -191,9 +154,6 @@ public class TestFileResource extends BaseJerseyTest {
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.get(); .get();
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus())); Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(fileBytes, null));
// Deletes a file // Deletes a file
json = target().path("/file/" + file1Id).request() json = target().path("/file/" + file1Id).request()
@@ -294,44 +254,82 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(1, files.getJsonObject(0).getInt("version")); Assert.assertEquals(1, files.getJsonObject(0).getInt("version"));
} }
@Test
public void testFileResourceZip() throws Exception {
// Login file_resources
clientUtil.createUser("file_resources_zip");
String file1Token = clientUtil.login("file_resources_zip");
// Create a document
String document1Id = clientUtil.createDocument(file1Token);
// Add a file
String file1Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, file1Token, document1Id);
// Get a ZIP from all files of the document
Response response = target().path("/file/zip")
.queryParam("id", document1Id)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.get();
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
InputStream is = (InputStream) response.getEntity();
ZipInputStream zipInputStream = new ZipInputStream(is);
Assert.assertEquals(zipInputStream.getNextEntry().getName(), "0-PIA00452.jpg");
Assert.assertNull(zipInputStream.getNextEntry());
// Fail if we don't have access to the document
response = target().path("/file/zip")
.queryParam("id", document1Id)
.request()
.get();
Assert.assertEquals(Status.NOT_FOUND, Status.fromStatusCode(response.getStatus()));
// Create a document
String document2Id = clientUtil.createDocument(file1Token);
// Add a file
String file2Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, file1Token, document2Id);
// Get a ZIP from both files
response = target().path("/file/zip")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.post(Entity.form(new Form()
.param("files", file1Id)
.param("files", file2Id)));
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity();
zipInputStream = new ZipInputStream(is);
Assert.assertNotNull(zipInputStream.getNextEntry().getName());
Assert.assertNotNull(zipInputStream.getNextEntry().getName());
Assert.assertNull(zipInputStream.getNextEntry());
// Fail if we don't have access to the files
response = target().path("/file/zip")
.request()
.post(Entity.form(new Form()
.param("files", file1Id)
.param("files", file2Id)));
Assert.assertEquals(Status.FORBIDDEN, Status.fromStatusCode(response.getStatus()));
}
/** /**
* Test using a ZIP file. * Test using a ZIP file.
* *
* @throws Exception e * @throws Exception e
*/ */
@Test @Test
public void testZipFile() throws Exception { public void testZipFileUpload() throws Exception {
// Login file1 // Login file_zip
clientUtil.createUser("file2"); clientUtil.createUser("file_zip");
String file2Token = clientUtil.login("file2"); String fileZipToken = clientUtil.login("file_zip");
// Create a document // Create a document
long create1Date = new Date().getTime(); String document1Id = clientUtil.createDocument(fileZipToken);
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file2Token)
.put(Entity.form(new Form()
.param("title", "File test document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Add a file // Add a file
String file1Id; clientUtil.addFileToDocument(FILE_WIKIPEDIA_ZIP, fileZipToken, document1Id);
try (InputStream is = Resources.getResource("file/wikipedia.zip").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "wikipedia.zip");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file2Token)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
Assert.assertEquals(525069L, json.getJsonNumber("size").longValue());
}
}
} }
/** /**
@@ -341,29 +339,16 @@ public class TestFileResource extends BaseJerseyTest {
*/ */
@Test @Test
public void testOrphanFile() throws Exception { public void testOrphanFile() throws Exception {
// Login file3 // Login file_orphan
clientUtil.createUser("file3"); clientUtil.createUser("file_orphan");
String file3Token = clientUtil.login("file3"); String fileOrphanToken = clientUtil.login("file_orphan");
// Add a file // Add a file
String file1Id; String file1Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileOrphanToken, null);
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
}
}
// Get all orphan files // Get all orphan files
JsonObject json = target().path("/file/list").request() JsonObject json = target().path("/file/list").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get(JsonObject.class); .get(JsonObject.class);
JsonArray files = json.getJsonArray("files"); JsonArray files = json.getJsonArray("files");
Assert.assertEquals(1, files.size()); Assert.assertEquals(1, files.size());
@@ -372,66 +357,45 @@ public class TestFileResource extends BaseJerseyTest {
Response response = target().path("/file/" + file1Id + "/data") Response response = target().path("/file/" + file1Id + "/data")
.queryParam("size", "thumb") .queryParam("size", "thumb")
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get(); .get();
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus())); Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
InputStream is = (InputStream) response.getEntity(); InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is); byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0); Assert.assertTrue(fileBytes.length > 0);
// Get the file data // Get the file data
response = target().path("/file/" + file1Id + "/data").request() response = target().path("/file/" + file1Id + "/data").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get(); .get();
is = (InputStream) response.getEntity(); is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is); fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertEquals(163510, fileBytes.length); Assert.assertEquals(163510, fileBytes.length);
// Create a document // Create another document
json = target().path("/document").request() String document2Id = clientUtil.createDocument(fileOrphanToken);
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.put(Entity.form(new Form()
.param("title", "File test document 1")
.param("language", "eng")), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Attach a file to a document // Attach a file to a document
target().path("/file/" + file1Id + "/attach").request() target().path("/file/" + file1Id + "/attach").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.post(Entity.form(new Form() .post(Entity.form(new Form()
.param("id", document1Id)), JsonObject.class); .param("id", document2Id)), JsonObject.class);
// Get all files from a document // Get all files from a document
json = target().path("/file/list") json = target().path("/file/list")
.queryParam("id", document1Id) .queryParam("id", document2Id)
.request() .request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get(JsonObject.class); .get(JsonObject.class);
files = json.getJsonArray("files"); files = json.getJsonArray("files");
Assert.assertEquals(1, files.size()); Assert.assertEquals(1, files.size());
// Add a file // Add a file
String file2Id; String file2Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileOrphanToken, null);
try (InputStream is0 = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is0, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file2Id = json.getString("id");
Assert.assertNotNull(file2Id);
}
}
// Deletes a file // Deletes a file
json = target().path("/file/" + file2Id).request() json = target().path("/file/" + file2Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token) .cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.delete(JsonObject.class); .delete(JsonObject.class);
Assert.assertEquals("ok", json.getString("status")); Assert.assertEquals("ok", json.getString("status"));
} }
@@ -448,20 +412,7 @@ public class TestFileResource extends BaseJerseyTest {
String fileQuotaToken = clientUtil.login("file_quota"); String fileQuotaToken = clientUtil.login("file_quota");
// Add a file (292641 bytes large) // Add a file (292641 bytes large)
String file1Id; String file1Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
}
}
// Check current quota // Check current quota
JsonObject json = target().path("/user").request() JsonObject json = target().path("/user").request()
@@ -470,17 +421,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(292641L, json.getJsonNumber("storage_current").longValue()); Assert.assertEquals(292641L, json.getJsonNumber("storage_current").longValue());
// Add a file (292641 bytes large) // Add a file (292641 bytes large)
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) { clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
}
}
// Check current quota // Check current quota
json = target().path("/user").request() json = target().path("/user").request()
@@ -489,17 +430,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(585282L, json.getJsonNumber("storage_current").longValue()); Assert.assertEquals(585282L, json.getJsonNumber("storage_current").longValue());
// Add a file (292641 bytes large) // Add a file (292641 bytes large)
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) { clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
}
}
// Check current quota // Check current quota
json = target().path("/user").request() json = target().path("/user").request()
@@ -508,17 +439,10 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(877923L, json.getJsonNumber("storage_current").longValue()); Assert.assertEquals(877923L, json.getJsonNumber("storage_current").longValue());
// Add a file (292641 bytes large) // Add a file (292641 bytes large)
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) { try {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png"); clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
try (FormDataMultiPart multiPart = new FormDataMultiPart()) { Assert.fail();
Response response = target() } catch (javax.ws.rs.BadRequestException ignored) {
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE));
Assert.assertEquals(Status.BAD_REQUEST.getStatusCode(), response.getStatus());
}
} }
// Deletes a file // Deletes a file
@@ -545,17 +469,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertNotNull(document1Id); Assert.assertNotNull(document1Id);
// Add a file to this document (163510 bytes large) // Add a file to this document (163510 bytes large)
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) { clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileQuotaToken, document1Id);
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
}
}
// Check current quota // Check current quota
json = target().path("/user").request() json = target().path("/user").request()

View File

@@ -439,13 +439,11 @@ public class TestUserResource extends BaseJerseyTest {
// Create absent_minded who lost his password // Create absent_minded who lost his password
clientUtil.createUser("absent_minded"); clientUtil.createUser("absent_minded");
// User no_such_user try to recovery its password: invalid user // User no_such_user try to recovery its password: silently do nothing to avoid leaking users
Response response = target().path("/user/password_lost").request() JsonObject json = target().path("/user/password_lost").request()
.post(Entity.form(new Form() .post(Entity.form(new Form()
.param("username", "no_such_user"))); .param("username", "no_such_user")), JsonObject.class);
Assert.assertEquals(Response.Status.BAD_REQUEST, Response.Status.fromStatusCode(response.getStatus())); Assert.assertEquals("ok", json.getString("status"));
JsonObject json = response.readEntity(JsonObject.class);
Assert.assertEquals("UserNotFound", json.getString("type"));
// User absent_minded try to recovery its password: OK // User absent_minded try to recovery its password: OK
json = target().path("/user/password_lost").request() json = target().path("/user/password_lost").request()
@@ -461,7 +459,7 @@ public class TestUserResource extends BaseJerseyTest {
String key = keyMatcher.group(1).replaceAll("=", ""); String key = keyMatcher.group(1).replaceAll("=", "");
// User absent_minded resets its password: invalid key // User absent_minded resets its password: invalid key
response = target().path("/user/password_reset").request() Response response = target().path("/user/password_reset").request()
.post(Entity.form(new Form() .post(Entity.form(new Form()
.param("key", "no_such_key") .param("key", "no_such_key")
.param("password", "87654321"))); .param("password", "87654321")));

View File

@@ -1,8 +1,10 @@
package com.sismics.docs.rest.resource; package com.sismics.docs.rest.resource;
import javax.json.JsonObject; import javax.json.JsonObject;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST; import javax.ws.rs.POST;
import javax.ws.rs.Path; import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
/** /**
@@ -23,6 +25,7 @@ public class ThirdPartyWebhookResource extends BaseResource {
* @return Response * @return Response
*/ */
@POST @POST
@Consumes(MediaType.APPLICATION_JSON)
public Response webhook(JsonObject request) { public Response webhook(JsonObject request) {
lastPayload = request; lastPayload = request;
return Response.ok().build(); return Response.ok().build();

View File

@@ -11,3 +11,5 @@ log4j.logger.org.hibernate=ERROR
log4j.logger.org.apache.pdfbox=INFO log4j.logger.org.apache.pdfbox=INFO
log4j.logger.com.mchange=ERROR log4j.logger.com.mchange=ERROR
log4j.logger.org.apache.directory=ERROR log4j.logger.org.apache.directory=ERROR
log4j.logger.org.glassfish.grizzly=ERROR
log4j.logger.org.odftoolkit=ERROR

0
docs/.gitkeep Normal file
View File

78
pom.xml
View File

@@ -6,13 +6,13 @@
<groupId>com.sismics.docs</groupId> <groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId> <artifactId>docs-parent</artifactId>
<packaging>pom</packaging> <packaging>pom</packaging>
<version>1.9</version> <version>1.11</version>
<name>Docs Parent</name> <name>Docs Parent</name>
<properties> <properties>
<maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.source>11</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target> <maven.compiler.target>11</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<!-- Dependencies version (external) --> <!-- Dependencies version (external) -->
@@ -20,48 +20,48 @@
<commons-lang.commons-lang.version>2.6</commons-lang.commons-lang.version> <commons-lang.commons-lang.version>2.6</commons-lang.commons-lang.version>
<commons-io.commons-io.version>2.6</commons-io.commons-io.version> <commons-io.commons-io.version>2.6</commons-io.commons-io.version>
<org.apache.commons.commons-email.version>1.5</org.apache.commons.commons-email.version> <org.apache.commons.commons-email.version>1.5</org.apache.commons.commons-email.version>
<org.freemarker.freemarker.version>2.3.28</org.freemarker.freemarker.version> <org.freemarker.freemarker.version>2.3.30</org.freemarker.freemarker.version>
<commons-dbcp.version>1.4</commons-dbcp.version> <commons-dbcp.version>1.4</commons-dbcp.version>
<com.google.guava.guava.version>28.2-jre</com.google.guava.guava.version> <com.google.guava.guava.version>30.1-jre</com.google.guava.guava.version>
<log4j.log4j.version>1.2.16</log4j.log4j.version> <log4j.log4j.version>1.2.17</log4j.log4j.version>
<org.slf4j.version>1.6.4</org.slf4j.version> <org.slf4j.version>1.7.30</org.slf4j.version>
<org.slf4j.jcl-over-slf4j.version>1.6.6</org.slf4j.jcl-over-slf4j.version> <org.slf4j.jcl-over-slf4j.version>1.7.30</org.slf4j.jcl-over-slf4j.version>
<org.slf4j.jul-to-slf4j.version>1.6.6</org.slf4j.jul-to-slf4j.version> <org.slf4j.jul-to-slf4j.version>1.7.30</org.slf4j.jul-to-slf4j.version>
<junit.junit.version>4.12</junit.junit.version> <junit.junit.version>4.13.1</junit.junit.version>
<com.h2database.h2.version>1.4.197</com.h2database.h2.version> <com.h2database.h2.version>1.4.199</com.h2database.h2.version>
<org.glassfish.jersey.version>2.27</org.glassfish.jersey.version> <org.glassfish.jersey.version>2.33</org.glassfish.jersey.version>
<org.glassfish.javax.json.version>1.1.3</org.glassfish.javax.json.version> <org.glassfish.javax.json.version>1.1.4</org.glassfish.javax.json.version>
<org.mindrot.jbcrypt>0.3m</org.mindrot.jbcrypt> <at.favre.lib.bcrypt.version>0.9.0</at.favre.lib.bcrypt.version>
<org.apache.lucene.version>7.5.0</org.apache.lucene.version> <org.apache.lucene.version>8.7.0</org.apache.lucene.version>
<org.imgscalr.imgscalr-lib.version>4.2</org.imgscalr.imgscalr-lib.version> <org.imgscalr.imgscalr-lib.version>4.2</org.imgscalr.imgscalr-lib.version>
<org.apache.pdfbox.pdfbox.version>2.0.12</org.apache.pdfbox.pdfbox.version> <org.apache.pdfbox.pdfbox.version>2.0.22</org.apache.pdfbox.pdfbox.version>
<org.bouncycastle.bcprov-jdk15on.version>1.61</org.bouncycastle.bcprov-jdk15on.version> <org.bouncycastle.bcprov-jdk15on.version>1.68</org.bouncycastle.bcprov-jdk15on.version>
<joda-time.joda-time.version>2.10</joda-time.joda-time.version> <joda-time.joda-time.version>2.10.9</joda-time.joda-time.version>
<org.hibernate.hibernate.version>5.3.7.Final</org.hibernate.hibernate.version> <org.hibernate.hibernate.version>5.4.27.Final</org.hibernate.hibernate.version>
<javax.servlet.javax.servlet-api.version>4.0.1</javax.servlet.javax.servlet-api.version> <javax.servlet.javax.servlet-api.version>4.0.1</javax.servlet.javax.servlet-api.version>
<fr.opensagres.xdocreport.version>2.0.1</fr.opensagres.xdocreport.version> <fr.opensagres.xdocreport.version>2.0.2</fr.opensagres.xdocreport.version>
<net.java.dev.jna.jna.version>4.2.1</net.java.dev.jna.jna.version> <net.java.dev.jna.jna.version>5.6.0</net.java.dev.jna.jna.version>
<com.twelvemonkeys.imageio.version>3.3.2</com.twelvemonkeys.imageio.version> <com.twelvemonkeys.imageio.version>3.6.2</com.twelvemonkeys.imageio.version>
<com.levigo.jbig2.levigo-jbig2-imageio.version>1.6.5</com.levigo.jbig2.levigo-jbig2-imageio.version> <com.levigo.jbig2.levigo-jbig2-imageio.version>2.0</com.levigo.jbig2.levigo-jbig2-imageio.version>
<com.github.jai-imageio.jai-imageio-jpeg2000.version>1.3.0</com.github.jai-imageio.jai-imageio-jpeg2000.version> <com.github.jai-imageio.jai-imageio-jpeg2000.version>1.4.0</com.github.jai-imageio.jai-imageio-jpeg2000.version>
<org.postgresql.postgresql.version>42.2.5</org.postgresql.postgresql.version> <org.postgresql.postgresql.version>42.2.18</org.postgresql.postgresql.version>
<org.subethamail.subethasmtp-wiser.version>1.2</org.subethamail.subethasmtp-wiser.version> <org.subethamail.subethasmtp-wiser.version>1.2</org.subethamail.subethasmtp-wiser.version>
<com.icegreen.greenmail.version>1.5.8</com.icegreen.greenmail.version> <com.icegreen.greenmail.version>1.5.8</com.icegreen.greenmail.version>
<com.sun.mail.javax.mail.version>1.6.2</com.sun.mail.javax.mail.version> <com.sun.mail.javax.mail.version>1.6.2</com.sun.mail.javax.mail.version>
<org.jsoup.jsoup.version>1.11.3</org.jsoup.jsoup.version> <org.jsoup.jsoup.version>1.13.1</org.jsoup.jsoup.version>
<com.squareup.okhttp3.okhttp.version>3.11.0</com.squareup.okhttp3.okhttp.version> <com.squareup.okhttp3.okhttp.version>4.9.0</com.squareup.okhttp3.okhttp.version>
<org.apache.directory.api.api-all.version>1.0.0</org.apache.directory.api.api-all.version> <org.apache.directory.api.api-all.version>2.0.1</org.apache.directory.api.api-all.version>
<org.eclipse.jetty.jetty-server.version>9.4.17.v20190418</org.eclipse.jetty.jetty-server.version> <org.eclipse.jetty.jetty-server.version>9.4.51.v20230217</org.eclipse.jetty.jetty-server.version>
<org.eclipse.jetty.jetty-webapp.version>9.4.17.v20190418</org.eclipse.jetty.jetty-webapp.version> <org.eclipse.jetty.jetty-webapp.version>9.4.51.v20230217</org.eclipse.jetty.jetty-webapp.version>
<org.eclipse.jetty.jetty-servlet.version>9.4.17.v20190418</org.eclipse.jetty.jetty-servlet.version> <org.eclipse.jetty.jetty-servlet.version>9.4.51.v20230217</org.eclipse.jetty.jetty-servlet.version>
<!-- Plugins version --> <!-- Plugins version -->
<org.apache.maven.plugins.maven-antrun-plugin.version>1.8</org.apache.maven.plugins.maven-antrun-plugin.version> <org.apache.maven.plugins.maven-antrun-plugin.version>3.0.0</org.apache.maven.plugins.maven-antrun-plugin.version>
<org.apache.maven.plugins.maven-jar-plugin.version>3.1.0</org.apache.maven.plugins.maven-jar-plugin.version> <org.apache.maven.plugins.maven-jar-plugin.version>3.2.0</org.apache.maven.plugins.maven-jar-plugin.version>
<org.apache.maven.plugins.maven-war-plugin.version>3.2.2</org.apache.maven.plugins.maven-war-plugin.version> <org.apache.maven.plugins.maven-war-plugin.version>3.3.1</org.apache.maven.plugins.maven-war-plugin.version>
<org.apache.maven.plugins.maven-surefire-plugin.version>2.22.1</org.apache.maven.plugins.maven-surefire-plugin.version> <org.apache.maven.plugins.maven-surefire-plugin.version>3.0.0-M5</org.apache.maven.plugins.maven-surefire-plugin.version>
<org.eclipse.jetty.jetty-maven-plugin.version>9.4.17.v20190418</org.eclipse.jetty.jetty-maven-plugin.version> <org.eclipse.jetty.jetty-maven-plugin.version>9.4.51.v20230217</org.eclipse.jetty.jetty-maven-plugin.version>
</properties> </properties>
<scm> <scm>
@@ -256,9 +256,9 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.mindrot</groupId> <groupId>at.favre.lib</groupId>
<artifactId>jbcrypt</artifactId> <artifactId>bcrypt</artifactId>
<version>${org.mindrot.jbcrypt}</version> <version>${at.favre.lib.bcrypt.version}</version>
</dependency> </dependency>
<dependency> <dependency>