Mirror of https://github.com/sismics/docs.git (synced 2025-12-14 18:26:17 +00:00)
Compare commits
36 Commits
Commit SHA1s included in this comparison:
59597e962d, c85a951a9e, 7f47a17633, 690c961a55, 21efd1e4a7, ad27228429,
dd4a1667ca, 399d2b7951, d51dfd6636, ca85c1fa9f, 5e7f06070e, dc0c20cd0c,
98aa33341a, 1f7c0afc1e, 1ccce3f942, 90d5bc8de7, c6a685d7c0, e6cfd899e5,
bd23f14792, 46f6b9e537, d5832c48e1, 64ec0f63ca, 0b7c42e814, d8dc63fc98,
81a7f154c2, af3263d471, bbe5f19997, f33650c099, 58f81ec851, c9262eb204,
3637b832e5, ee56cfe2b4, 721410c7d0, f0310e3933, 302d7cccc4, f9977d5ce6
.github/workflows/build-deploy.yml (vendored, new file, 84 lines)
@@ -0,0 +1,84 @@
name: Maven CI/CD

on:
  push:
    branches: [master]
    tags: [v*]
  workflow_dispatch:

jobs:
  build_and_publish:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 11
        uses: actions/setup-java@v2
        with:
          java-version: "11"
          distribution: "temurin"
          cache: maven
      - name: Install test dependencies
        run: sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
      - name: Build with Maven
        run: mvn -Pprod clean install
      - name: Upload war artifact
        uses: actions/upload-artifact@v2
        with:
          name: docs-web-ci.war
          path: docs-web/target/docs*.war

  build_docker_image:
    name: Publish to Docker Hub
    runs-on: ubuntu-latest
    needs: [build_and_publish]

    steps:
      - name: Checkout
        uses: actions/checkout@v2
      - name: Download war artifact
        uses: actions/download-artifact@v2
        with:
          name: docs-web-ci.war
          path: docs-web/target
      - name: Setup up Docker Buildx
        uses: docker/setup-buildx-action@v1
      - name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      - name: Populate Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: sismics/docs
          flavor: |
            latest=false
          tags: |
            type=ref,event=tag
            type=raw,value=latest,enable=${{ github.ref_type != 'tag' }}
          labels: |
            org.opencontainers.image.title = Teedy
            org.opencontainers.image.description = Teedy is an open source, lightweight document management system for individuals and businesses.
            org.opencontainers.image.created = ${{ github.event_created_at }}
            org.opencontainers.image.author = Sismics
            org.opencontainers.image.url = https://teedy.io/
            org.opencontainers.image.vendor = Sismics
            org.opencontainers.image.license = GPLv2
            org.opencontainers.image.version = ${{ github.event_head_commit.id }}
      - name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}
.gitignore (vendored)
@@ -14,3 +14,8 @@ import_test
 teedy-importer-linux
 teedy-importer-macos
 teedy-importer-win.exe
+docs/*
+!docs/.gitkeep
+
+#macos
+.DS_Store
Dockerfile
@@ -1,4 +1,4 @@
-FROM sismics/ubuntu-jetty:9.4.36
+FROM sismics/ubuntu-jetty:9.4.51
 LABEL maintainer="b.gamard@sismics.com"
 
 RUN apt-get update && \
@@ -7,6 +7,7 @@ RUN apt-get update && \
     mediainfo \
     tesseract-ocr \
     tesseract-ocr-ara \
+    tesseract-ocr-ces \
    tesseract-ocr-chi-sim \
    tesseract-ocr-chi-tra \
    tesseract-ocr-dan \
README.md
@@ -14,8 +14,7 @@ Teedy is an open source, lightweight document management system for individuals
 
 ![New!](https://www.sismics.com/public/img/new.png)
 
-Demo
-----
+# Demo
 
 A demo is available at [demo.teedy.io](https://demo.teedy.io)
 
@@ -23,8 +22,7 @@ A demo is available at [demo.teedy.io](https://demo.teedy.io)
 - "admin" login with "admin" password
 - "demo" login with "password" password
 
-Features
---------
+# Features
 
 - Responsive user interface
 - Optical character recognition
@@ -54,21 +52,20 @@ Features
 - [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
 - Tested to one million documents
 
-Install with Docker
--------------------
+# Install with Docker
 
 A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance.
 
 **The default admin password is "admin". Don't forget to change it before going to production.**
 
 - Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
-- Latest stable version: `sismics/docs:v1.10`
+- Latest stable version: `sismics/docs:v1.11`
 
 The data directory is `/data`. Don't forget to mount a volume on it.
 
 To build external URL, the server is expecting a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com)
 
-### Available environment variables
+## Available environment variables
 
 - General
   - `DOCS_BASE_URL`: The base url used by the application. Generated url's will be using this as base.
@@ -94,18 +91,18 @@ To build external URL, the server is expecting a `DOCS_BASE_URL` environment var
 - `DOCS_SMTP_USERNAME`: The username to be used.
 - `DOCS_SMTP_PASSWORD`: The password to be used.
 
-### Examples
+## Examples
 
 In the following examples some passwords are exposed in cleartext. This was done in order to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to securely store your passwords.
 
-#### Using the internal database
+### Using the internal database
 
 ```yaml
 version: '3'
 services:
   # Teedy Application
   teedy-server:
-    image: sismics/docs:v1.10
+    image: sismics/docs:v1.11
     restart: unless-stopped
     ports:
       # Map internal port to host
@@ -121,14 +118,14 @@ services:
       - ./docs/data:/data
 ```
 
-#### Using PostgreSQL
+### Using PostgreSQL
 
 ```yaml
 version: '3'
 services:
   # Teedy Application
   teedy-server:
-    image: sismics/docs:v1.10
+    image: sismics/docs:v1.11
     restart: unless-stopped
     ports:
       # Map internal port to host
@@ -179,10 +176,9 @@ networks:
     driver: bridge
 ```
 
-Manual installation
--------------------
+# Manual installation
 
-#### Requirements
+## Requirements
 
 - Java 11
 - Tesseract 4 for OCR
@@ -190,13 +186,12 @@ Manual installation
 - mediainfo for video metadata extraction
 - A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/)
 
-#### Download
+## Download
 
 The latest release is downloadable here: <https://github.com/sismics/docs/releases> in WAR format.
 **The default admin password is "admin". Don't forget to change it before going to production.**
 
-How to build Teedy from the sources
-----------------------------------
+## How to build Teedy from the sources
 
 Prerequisites: JDK 11, Maven 3, NPM, Grunt, Tesseract 4
 
@@ -209,35 +204,39 @@ Teedy is organized in several Maven modules:
 First off, clone the repository: `git clone git://github.com/sismics/docs.git`
 or download the sources from GitHub.
 
-#### Launch the build
+### Launch the build
 
 From the root directory:
 
-    mvn clean -DskipTests install
+```console
+mvn clean -DskipTests install
+```
 
-#### Run a stand-alone version
+### Run a stand-alone version
 
 From the `docs-web` directory:
 
-    mvn jetty:run
+```console
+mvn jetty:run
+```
 
-#### Build a .war to deploy to your servlet container
+### Build a .war to deploy to your servlet container
 
 From the `docs-web` directory:
 
-    mvn -Pprod -DskipTests clean install
+```console
+mvn -Pprod -DskipTests clean install
+```
 
 You will get your deployable WAR in the `docs-web/target` directory.
 
-Contributing
-------------
+# Contributing
 
 All contributions are more than welcomed. Contributions may close an issue, fix a bug (reported or not reported), improve the existing code, add new feature, and so on.
 
 The `master` branch is the default and base branch for the project. It is used for development and all Pull Requests should go there.
 
-License
--------
+# License
 
 Teedy is released under the terms of the GPL license. See `COPYING` for more
 information or see <http://opensource.org/licenses/GPL-2.0>.
docker-compose.yml (new file, 18 lines)
@@ -0,0 +1,18 @@
version: '3'
services:
  # Teedy Application
  teedy-server:
    image: sismics/docs:v1.10
    restart: unless-stopped
    ports:
      # Map internal port to host
      - 8080:8080
    environment:
      # Base url to be used
      DOCS_BASE_URL: "https://docs.example.com"
      # Set the admin email
      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
      # Set the admin password (in this example: "superSecure")
      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
    volumes:
      - ./docs/data:/data
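The `DOCS_ADMIN_PASSWORD_INIT` value above is a bcrypt hash, with every `$` doubled to `$$` so that docker-compose does not treat it as a variable reference. A minimal sketch of producing such a value with the at.favre.lib bcrypt library that this changeset already imports in `UserDao` (class name and work factor here are illustrative, not taken from the repository):

```java
import at.favre.lib.crypto.bcrypt.BCrypt;

public class AdminPasswordHash {
    public static void main(String[] args) {
        // Hash the desired admin password with a bcrypt work factor of 5,
        // matching the $2a$05$ prefix used in the compose example above.
        String hash = BCrypt.withDefaults().hashToString(5, "superSecure".toCharArray());

        // docker-compose interpolates "$", so every "$" in the hash must be
        // doubled ("$$") before pasting it into DOCS_ADMIN_PASSWORD_INIT.
        System.out.println(hash.replace("$", "$$"));
    }
}
```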
@@ -5,7 +5,7 @@
     <parent>
         <groupId>com.sismics.docs</groupId>
         <artifactId>docs-parent</artifactId>
-        <version>1.10</version>
+        <version>1.11</version>
         <relativePath>..</relativePath>
     </parent>
 
@@ -43,7 +43,7 @@ public class Constants {
     /**
      * Supported document languages.
      */
-    public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie");
+    public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces");
 
     /**
      * Base URL environment variable.
@@ -10,6 +10,7 @@ import com.sismics.util.context.ThreadLocalContext;
 import javax.persistence.EntityManager;
 import javax.persistence.NoResultException;
 import javax.persistence.Query;
+import javax.persistence.TypedQuery;
 import java.sql.Timestamp;
 import java.util.Date;
 import java.util.List;
@@ -50,10 +51,9 @@ public class DocumentDao {
      * @param limit Limit
      * @return List of documents
      */
-    @SuppressWarnings("unchecked")
     public List<Document> findAll(int offset, int limit) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select d from Document d where d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.deleteDate is null", Document.class);
         q.setFirstResult(offset);
         q.setMaxResults(limit);
         return q.getResultList();
@@ -65,10 +65,9 @@ public class DocumentDao {
      * @param userId User ID
      * @return List of documents
      */
-    @SuppressWarnings("unchecked")
     public List<Document> findByUserId(String userId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null", Document.class);
         q.setParameter("userId", userId);
         return q.getResultList();
     }
@@ -138,16 +137,16 @@ public class DocumentDao {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the document
-        Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
-        q.setParameter("id", id);
-        Document documentDb = (Document) q.getSingleResult();
+        TypedQuery<Document> dq = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
+        dq.setParameter("id", id);
+        Document documentDb = dq.getSingleResult();
 
         // Delete the document
         Date dateNow = new Date();
         documentDb.setDeleteDate(dateNow);
 
         // Delete linked data
-        q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
+        Query q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
         q.setParameter("documentId", id);
         q.setParameter("dateNow", dateNow);
         q.executeUpdate();
@@ -179,10 +178,10 @@ public class DocumentDao {
      */
     public Document getById(String id) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
         q.setParameter("id", id);
         try {
-            return (Document) q.getSingleResult();
+            return q.getSingleResult();
         } catch (NoResultException e) {
             return null;
         }
@@ -199,9 +198,9 @@ public class DocumentDao {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the document
-        Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
+        TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
         q.setParameter("id", document.getId());
-        Document documentDb = (Document) q.getSingleResult();
+        Document documentDb = q.getSingleResult();
 
         // Update the document
         documentDb.setTitle(document.getTitle());
@@ -237,7 +236,6 @@ public class DocumentDao {
         query.setParameter("fileId", document.getFileId());
         query.setParameter("id", document.getId());
         query.executeUpdate();
-
     }
 
     /**
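The DocumentDao and FileDao hunks in this changeset all make the same move from the raw `javax.persistence.Query` API to `TypedQuery`, which removes the unchecked casts and the `@SuppressWarnings("unchecked")` annotations. A small self-contained sketch of the pattern (the helper class and method names are illustrative, not part of the repository):

```java
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;

public final class JpaQueries {
    private JpaQueries() {
    }

    // Returns a single entity of type T or null. Because the query carries its
    // result type, getSingleResult() needs no cast and the caller needs no
    // @SuppressWarnings("unchecked").
    public static <T> T singleOrNull(EntityManager em, String jpql, Class<T> type,
                                     String paramName, Object paramValue) {
        TypedQuery<T> q = em.createQuery(jpql, type);
        q.setParameter(paramName, paramValue);
        try {
            return q.getSingleResult();
        } catch (NoResultException e) {
            return null;
        }
    }
}
```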
@@ -7,7 +7,8 @@ import com.sismics.util.context.ThreadLocalContext;
 import javax.persistence.EntityManager;
 import javax.persistence.NoResultException;
 import javax.persistence.Query;
+import javax.persistence.TypedQuery;
 import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.UUID;
@@ -47,10 +48,9 @@ public class FileDao {
      * @param limit Limit
      * @return List of files
      */
-    @SuppressWarnings("unchecked")
     public List<File> findAll(int offset, int limit) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.deleteDate is null", File.class);
         q.setFirstResult(offset);
         q.setMaxResults(limit);
         return q.getResultList();
@@ -62,28 +62,38 @@ public class FileDao {
      * @param userId User ID
      * @return List of files
      */
-    @SuppressWarnings("unchecked")
     public List<File> findByUserId(String userId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null", File.class);
         q.setParameter("userId", userId);
         return q.getResultList();
     }
 
     /**
-     * Returns an active file.
+     * Returns a list of active files.
+     *
+     * @param ids Files IDs
+     * @return List of files
+     */
+    public List<File> getFiles(List<String> ids) {
+        EntityManager em = ThreadLocalContext.get().getEntityManager();
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id in :ids and f.deleteDate is null", File.class);
+        q.setParameter("ids", ids);
+        return q.getResultList();
+    }
+
+    /**
+     * Returns an active file or null.
      *
      * @param id File ID
-     * @return Document
+     * @return File
      */
     public File getFile(String id) {
-        EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
-        q.setParameter("id", id);
-        try {
-            return (File) q.getSingleResult();
-        } catch (NoResultException e) {
+        List<File> files = getFiles(List.of(id));
+        if (files.isEmpty()) {
             return null;
+        } else {
+            return files.get(0);
         }
     }
@@ -92,15 +102,15 @@ public class FileDao {
      *
      * @param id File ID
      * @param userId User ID
-     * @return Document
+     * @return File
      */
     public File getFile(String id, String userId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null", File.class);
         q.setParameter("id", id);
         q.setParameter("userId", userId);
         try {
-            return (File) q.getSingleResult();
+            return q.getSingleResult();
         } catch (NoResultException e) {
             return null;
         }
@@ -116,9 +126,9 @@ public class FileDao {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the file
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
         q.setParameter("id", id);
-        File fileDb = (File) q.getSingleResult();
+        File fileDb = q.getSingleResult();
 
         // Delete the file
         Date dateNow = new Date();
@@ -138,9 +148,9 @@ public class FileDao {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
 
         // Get the file
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
         q.setParameter("id", file.getId());
-        File fileDb = (File) q.getSingleResult();
+        File fileDb = q.getSingleResult();
 
         // Update the file
         fileDb.setDocumentId(file.getDocumentId());
@@ -162,32 +172,43 @@ public class FileDao {
      */
     public File getActiveById(String id) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
         q.setParameter("id", id);
         try {
-            return (File) q.getSingleResult();
+            return q.getSingleResult();
         } catch (NoResultException e) {
             return null;
         }
     }
 
     /**
-     * Get files by document ID or all orphan files of an user.
+     * Get files by document ID or all orphan files of a user.
      *
      * @param userId User ID
     * @param documentId Document ID
     * @return List of files
     */
-    @SuppressWarnings("unchecked")
    public List<File> getByDocumentId(String userId, String documentId) {
        EntityManager em = ThreadLocalContext.get().getEntityManager();
        if (documentId == null) {
-            Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc");
+            TypedQuery<File> q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc", File.class);
            q.setParameter("userId", userId);
            return q.getResultList();
+        } else {
+            return getByDocumentsIds(Collections.singleton(documentId));
        }
-        Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc");
-        q.setParameter("documentId", documentId);
    }
 
    /**
+     * Get files by documents IDs.
+     *
+     * @param documentIds Documents IDs
+     * @return List of files
+     */
+    public List<File> getByDocumentsIds(Iterable<String> documentIds) {
+        EntityManager em = ThreadLocalContext.get().getEntityManager();
+        TypedQuery<File> q = em.createQuery("select f from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null order by f.order asc", File.class);
+        q.setParameter("documentIds", documentIds);
+        return q.getResultList();
+    }
 
@@ -197,10 +218,9 @@ public class FileDao {
      * @param versionId Version ID
      * @return List of files
      */
-    @SuppressWarnings("unchecked")
     public List<File> getByVersionId(String versionId) {
         EntityManager em = ThreadLocalContext.get().getEntityManager();
-        Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc");
+        TypedQuery<File> q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc", File.class);
         q.setParameter("versionId", versionId);
         return q.getResultList();
     }
@@ -184,10 +184,8 @@ public class GroupDao {
 
         criteriaList.add("g.GRP_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
         sb.append(" where ");
         sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -123,10 +123,8 @@ public class MetadataDao {
 
         criteriaList.add("m.MET_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
         sb.append(" where ");
         sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -64,10 +64,8 @@ public class RouteDao {
         }
         criteriaList.add("r.RTE_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
         sb.append(" where ");
         sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -145,10 +145,8 @@ public class RouteModelDao {
 
         criteriaList.add("rm.RTM_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
         sb.append(" where ");
         sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -90,10 +90,8 @@ public class RouteStepDao {
         }
         criteriaList.add("rs.RTP_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
         sb.append(" where ");
         sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -19,7 +19,6 @@ public class ShareDao {
      *
      * @param share Share
      * @return New ID
-     * @throws Exception
      */
     public String create(Share share) {
         // Create the UUID
@@ -199,10 +199,8 @@ public class TagDao {
 
         criteriaList.add("t.TAG_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
         sb.append(" where ");
         sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
         // Perform the search
         QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -1,6 +1,7 @@
 package com.sismics.docs.core.dao;
 
 import com.google.common.base.Joiner;
+import com.google.common.base.Strings;
 import at.favre.lib.crypto.bcrypt.BCrypt;
 import org.joda.time.DateTime;
 import org.slf4j.Logger;
@@ -289,7 +290,7 @@ public class UserDao {
     private String hashPassword(String password) {
         int bcryptWork = Constants.DEFAULT_BCRYPT_WORK;
         String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV);
-        if (envBcryptWork != null) {
+        if (!Strings.isNullOrEmpty(envBcryptWork)) {
             try {
                 int envBcryptWorkInt = Integer.parseInt(envBcryptWork);
                 if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) {
@@ -20,7 +20,6 @@ public class VocabularyDao {
      *
      * @param vocabulary Vocabulary
      * @return New ID
-     * @throws Exception
     */
    public String create(Vocabulary vocabulary) {
        // Create the UUID
@@ -42,10 +42,8 @@ public class WebhookDao {
        }
        criteriaList.add("w.WHK_DELETEDATE_D is null");
 
-        if (!criteriaList.isEmpty()) {
        sb.append(" where ");
        sb.append(Joiner.on(" and ").join(criteriaList));
-        }
 
        // Perform the search
        QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@@ -1,5 +1,6 @@
 package com.sismics.docs.core.dao.criteria;
 
+import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 
@@ -49,13 +50,13 @@ public class DocumentCriteria {
      * Tag IDs.
      * The first level list will be AND'ed and the second level list will be OR'ed.
      */
-    private List<List<String>> tagIdList;
+    private List<List<String>> tagIdList = new ArrayList<>();
 
     /**
      * Tag IDs to exclude.
      * The first and second level list will be excluded.
      */
-    private List<List<String>> excludedTagIdList;
+    private List<List<String>> excludedTagIdList = new ArrayList<>();
 
     /**
      * Shared status.
@@ -131,19 +132,10 @@ public class DocumentCriteria {
         return tagIdList;
     }
 
-    public void setTagIdList(List<List<String>> tagIdList) {
-        this.tagIdList = tagIdList;
-    }
-
     public List<List<String>> getExcludedTagIdList() {
         return excludedTagIdList;
     }
 
-    public DocumentCriteria setExcludedTagIdList(List<List<String>> excludedTagIdList) {
-        this.excludedTagIdList = excludedTagIdList;
-        return this;
-    }
-
     public Boolean getShared() {
         return shared;
     }
@@ -168,10 +160,6 @@ public class DocumentCriteria {
         this.creatorId = creatorId;
     }
 
-    public Boolean getActiveRoute() {
-        return activeRoute;
-    }
-
     public Date getUpdateDateMin() {
         return updateDateMin;
     }
@@ -188,6 +176,10 @@ public class DocumentCriteria {
         this.updateDateMax = updateDateMax;
     }
 
+    public Boolean getActiveRoute() {
+        return activeRoute;
+    }
+
     public void setActiveRoute(Boolean activeRoute) {
         this.activeRoute = activeRoute;
     }
@@ -1,5 +1,6 @@
 package com.sismics.docs.core.model.context;
 
+import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.eventbus.AsyncEventBus;
 import com.google.common.eventbus.EventBus;
@@ -106,7 +107,7 @@ public class AppContext {
 
         // Change the admin password if needed
         String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV);
-        if (envAdminPassword != null) {
+        if (!Strings.isNullOrEmpty(envAdminPassword)) {
             UserDao userDao = new UserDao();
             User adminUser = userDao.getById("admin");
             if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) {
@@ -117,7 +118,7 @@ public class AppContext {
 
         // Change the admin email if needed
         String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV);
-        if (envAdminEmail != null) {
+        if (!Strings.isNullOrEmpty(envAdminEmail)) {
             UserDao userDao = new UserDao();
             User adminUser = userDao.getById("admin");
             if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) {
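Several hunks in this changeset (UserDao, AppContext, EmailUtil, EMF) replace bare `!= null` checks on environment variables with Guava's `Strings.isNullOrEmpty`. The practical difference shows up when a variable is set but empty; a minimal sketch (the value is illustrative):

```java
import com.google.common.base.Strings;

public class EnvCheckExample {
    public static void main(String[] args) {
        // Simulates a variable that is present but empty, e.g. "DOCS_ADMIN_PASSWORD_INIT=" in compose.
        String envAdminPassword = "";

        System.out.println(envAdminPassword != null);                  // true  -> the old check would apply the override
        System.out.println(!Strings.isNullOrEmpty(envAdminPassword));  // false -> the new check skips it
    }
}
```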
@@ -69,13 +69,18 @@ public class FileService extends AbstractScheduledService {
         return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS);
     }
 
+    public Path createTemporaryFile() throws IOException {
+        return createTemporaryFile(null);
+    }
+
     /**
      * Create a temporary file.
      *
+     * @param name Wanted file name
      * @return New temporary file
      */
-    public Path createTemporaryFile() throws IOException {
-        Path path = Files.createTempFile("sismics_docs", null);
+    public Path createTemporaryFile(String name) throws IOException {
+        Path path = Files.createTempFile("sismics_docs", name);
         referenceSet.add(new TemporaryPathReference(path, referenceQueue));
         return path;
     }
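In the new `createTemporaryFile(String name)` overload above, the wanted name is passed as the suffix argument of `Files.createTempFile`, so it is appended to the generated temporary name rather than replacing it. A standalone sketch of that behaviour (the printed paths in the comments are examples only):

```java
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class TempFileExample {
    public static void main(String[] args) throws IOException {
        // Passing null keeps the default ".tmp" suffix; passing a name appends it.
        Path unnamed = Files.createTempFile("sismics_docs", null);
        Path named = Files.createTempFile("sismics_docs", "report.pdf");

        System.out.println(unnamed); // e.g. /tmp/sismics_docs123456789.tmp
        System.out.println(named);   // e.g. /tmp/sismics_docs123456789report.pdf

        Files.delete(unnamed);
        Files.delete(named);
    }
}
```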
@@ -85,7 +85,7 @@ public class InboxService extends AbstractScheduledService {
         lastSyncDate = new Date();
         lastSyncMessageCount = 0;
         try {
-            HashMap<String, String> tagsNameToId = getAllTags();
+            Map<String, String> tagsNameToId = getAllTags();
 
             inbox = openInbox();
             Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
@@ -192,7 +192,7 @@ public class InboxService extends AbstractScheduledService {
      * @param message Message
      * @throws Exception e
      */
-    private void importMessage(Message message, HashMap<String, String> tags) throws Exception {
+    private void importMessage(Message message, Map<String, String> tags) throws Exception {
         log.info("Importing message: " + message.getSubject());
 
         // Parse the mail
@@ -273,16 +273,16 @@ public class InboxService extends AbstractScheduledService {
     /**
      * Fetches a HashMap with all tag names as keys and their respective ids as values.
      *
-     * @return HashMap with all tags or null if not enabled
+     * @return Map with all tags or null if not enabled
      */
-    private HashMap<String, String> getAllTags() {
+    private Map<String, String> getAllTags() {
         if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
             return null;
         }
         TagDao tagDao = new TagDao();
         List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));
 
-        HashMap<String, String> tagsNameToId = new HashMap<>();
+        Map<String, String> tagsNameToId = new HashMap<>();
         for (TagDto tagDto : tags) {
             tagsNameToId.put(tagDto.getName(), tagDto.getId());
         }
 
@@ -1,6 +1,5 @@
 package com.sismics.docs.core.util;
 
-import com.google.common.base.Charsets;
 import com.google.common.base.Strings;
 import com.google.common.collect.Lists;
 import com.google.common.io.CharStreams;
@@ -28,6 +27,7 @@ import java.awt.image.BufferedImage;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.*;
@@ -76,7 +76,7 @@ public class FileUtil {
 
         // Consume the data as text
         try (InputStream is = process.getInputStream()) {
-            return CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
+            return CharStreams.toString(new InputStreamReader(is, StandardCharsets.UTF_8));
         }
     }
 
@@ -1,8 +1,8 @@
 package com.sismics.docs.core.util;
 
-import com.google.common.collect.Lists;
 import com.sismics.docs.core.dao.dto.TagDto;
 
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -12,14 +12,14 @@ import java.util.List;
  */
 public class TagUtil {
     /**
-     * Recursively find children of a tags.
+     * Recursively find children of a tag.
      *
      * @param parentTagDto Parent tag
      * @param allTagDtoList List of all tags
      * @return Children tags
      */
     public static List<TagDto> findChildren(TagDto parentTagDto, List<TagDto> allTagDtoList) {
-        List<TagDto> childrenTagDtoList = Lists.newArrayList();
+        List<TagDto> childrenTagDtoList = new ArrayList<>();
 
         for (TagDto tagDto : allTagDtoList) {
             if (parentTagDto.getId().equals(tagDto.getParentId())) {
@@ -32,15 +32,15 @@ public class TagUtil {
     }
 
     /**
-     * Find tags by name (start with).
+     * Find tags by name (start with, ignore case).
      *
      * @param name Name
      * @param allTagDtoList List of all tags
      * @return List of filtered tags
      */
     public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) {
-        List<TagDto> tagDtoList = Lists.newArrayList();
-        if (name == null || name.isEmpty()) {
+        List<TagDto> tagDtoList = new ArrayList<>();
+        if (name.isEmpty()) {
             return tagDtoList;
         }
         name = name.toLowerCase();
 
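Dropping the `name == null` guard in `findByName` changes how a null argument behaves: the old code returned an empty list, while the new code would throw a `NullPointerException`, so callers are now expected to pass at least an empty string. A minimal illustration of the two guards (values are illustrative):

```java
public class NullCheckBehavior {
    public static void main(String[] args) {
        String name = null;

        // Old guard: safe for null input, the method returned an empty list.
        System.out.println(name == null || name.isEmpty());   // true

        // New guard in TagUtil.findByName: assumes the caller never passes null.
        // Uncommenting the next line would throw a NullPointerException.
        // System.out.println(name.isEmpty());
    }
}
```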
@@ -11,6 +11,7 @@ import org.apache.pdfbox.pdmodel.PDDocument;
 
 import java.awt.image.BufferedImage;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 
@@ -33,7 +34,7 @@ public class TextPlainFormatHandler implements FormatHandler {
         PdfWriter.getInstance(output, pdfOutputStream);
 
         output.open();
-        String content = new String(Files.readAllBytes(file), Charsets.UTF_8);
+        String content = Files.readString(file, StandardCharsets.UTF_8);
         Font font = FontFactory.getFont("LiberationMono-Regular");
         Paragraph paragraph = new Paragraph(content, font);
         paragraph.setAlignment(Element.ALIGN_LEFT);
@@ -46,7 +47,7 @@ public class TextPlainFormatHandler implements FormatHandler {
 
     @Override
     public String extractContent(String language, Path file) throws Exception {
-        return new String(Files.readAllBytes(file), "UTF-8");
+        return Files.readString(file, StandardCharsets.UTF_8);
     }
 
     @Override
@@ -1,6 +1,5 @@
 package com.sismics.docs.core.util.format;
 
-import com.google.common.base.Charsets;
 import com.google.common.collect.Lists;
 import com.google.common.io.ByteStreams;
 import com.google.common.io.Closer;
@@ -13,6 +12,7 @@ import javax.imageio.ImageIO;
 
 import java.awt.image.BufferedImage;
 import java.io.IOException;
 import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
 import java.util.Arrays;
 import java.util.List;
@@ -65,7 +65,7 @@ public class VideoFormatHandler implements FormatHandler {
 
         // Consume the data as a string
         try (InputStream is = process.getInputStream()) {
-            return new String(ByteStreams.toByteArray(is), Charsets.UTF_8);
+            return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
         } catch (Exception e) {
             return null;
         }
@@ -299,7 +299,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
             criteriaList.add("d.DOC_TITLE_C = :title");
             parameterMap.put("title", criteria.getTitle());
         }
-        if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
+        if (!criteria.getTagIdList().isEmpty()) {
             int index = 0;
             for (List<String> tagIdList : criteria.getTagIdList()) {
                 List<String> tagCriteriaList = Lists.newArrayList();
@@ -29,6 +29,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringReader;
 import java.io.StringWriter;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.StandardCopyOption;
@@ -87,12 +88,12 @@ public class EmailUtil {
         try {
             // Build email headers
             HtmlEmail email = new HtmlEmail();
-            email.setCharset("UTF-8");
+            email.setCharset(StandardCharsets.UTF_8.name());
             ConfigDao configDao = new ConfigDao();
 
             // Hostname
             String envHostname = System.getenv(Constants.SMTP_HOSTNAME_ENV);
-            if (envHostname == null) {
+            if (Strings.isNullOrEmpty(envHostname)) {
                 email.setHostName(ConfigUtil.getConfigStringValue(ConfigType.SMTP_HOSTNAME));
             } else {
                 email.setHostName(envHostname);
@@ -101,7 +102,7 @@ public class EmailUtil {
             // Port
             int port = ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT);
             String envPort = System.getenv(Constants.SMTP_PORT_ENV);
-            if (envPort != null) {
+            if (!Strings.isNullOrEmpty(envPort)) {
                 port = Integer.valueOf(envPort);
             }
             email.setSmtpPort(port);
@@ -114,7 +115,7 @@ public class EmailUtil {
             // Username and password
             String envUsername = System.getenv(Constants.SMTP_USERNAME_ENV);
             String envPassword = System.getenv(Constants.SMTP_PASSWORD_ENV);
-            if (envUsername == null || envPassword == null) {
+            if (Strings.isNullOrEmpty(envUsername) || Strings.isNullOrEmpty(envPassword)) {
                 Config usernameConfig = configDao.getById(ConfigType.SMTP_USERNAME);
                 Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
                 if (usernameConfig != null && passwordConfig != null) {
@@ -1,6 +1,5 @@
 package com.sismics.util;
 
-import com.google.common.base.Charsets;
 import com.google.common.hash.Hashing;
 
 import javax.imageio.IIOImage;
@@ -13,6 +12,7 @@ import java.awt.image.BufferedImage;
 import java.awt.image.WritableRaster;
 import java.io.IOException;
 import java.io.OutputStream;
+import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 
 /**
@@ -80,7 +80,7 @@ public class ImageUtil {
         }
 
         return Hashing.md5().hashString(
-                email.trim().toLowerCase(), Charsets.UTF_8)
+                email.trim().toLowerCase(), StandardCharsets.UTF_8)
                 .toString();
     }
 
@@ -8,6 +8,7 @@ import java.io.IOException;
 import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.text.MessageFormat;
 import java.util.*;
 import java.util.jar.JarEntry;
@@ -53,7 +54,7 @@ public class ResourceUtil {
 
         // Extract the JAR path
         String jarPath = dirUrl.getPath().substring(5, dirUrl.getPath().indexOf("!"));
-        JarFile jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8"));
+        JarFile jar = new JarFile(URLDecoder.decode(jarPath, StandardCharsets.UTF_8));
         Set<String> fileSet = new HashSet<String>();
 
         try {
@@ -1,5 +1,6 @@
 package com.sismics.util.jpa;
 
+import com.google.common.base.Strings;
 import com.sismics.docs.core.util.DirectoryUtil;
 import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
 import org.hibernate.internal.util.config.ConfigurationHelper;
@@ -83,7 +84,7 @@ public final class EMF {
         Map<Object, Object> props = new HashMap<>();
         Path dbDirectory = DirectoryUtil.getDbDirectory();
         String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
-        if (databaseUrl == null) {
+        if (Strings.isNullOrEmpty(databaseUrl)) {
             props.put("hibernate.connection.driver_class", "org.h2.Driver");
             props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
             props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");
@@ -1,15 +1,9 @@
 package com.sismics.util.mime;
 
-import com.google.common.base.Charsets;
-import org.apache.commons.compress.utils.IOUtils;
-
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.UnsupportedEncodingException;
+import java.net.URLConnection;
 import java.nio.file.Files;
 import java.nio.file.Path;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
 
 /**
  * Utility to check MIME types.
@@ -18,7 +12,7 @@ import java.util.zip.ZipInputStream;
  */
 public class MimeTypeUtil {
     /**
-     * Try to guess the MIME type of a file by its magic number (header).
+     * Try to guess the MIME type of a file.
      *
      * @param file File to inspect
      * @param name File name
@@ -26,59 +20,19 @@ public class MimeTypeUtil {
      * @throws IOException e
      */
     public static String guessMimeType(Path file, String name) throws IOException {
-        String mimeType;
-        try (InputStream is = Files.newInputStream(file)) {
-            byte[] headerBytes = new byte[64];
-            is.read(headerBytes);
-            mimeType = guessMimeType(headerBytes, name);
-        }
-
-        return guessOpenDocumentFormat(mimeType, file);
-    }
-
-    /**
-     * Try to guess the MIME type of a file by its magic number (header).
-     *
-     * @param headerBytes File header (first bytes)
-     * @param name File name
-     * @return MIME type
-     * @throws UnsupportedEncodingException e
-     */
-    public static String guessMimeType(byte[] headerBytes, String name) throws UnsupportedEncodingException {
-        String header = new String(headerBytes, "US-ASCII");
-
-        // Detect by header bytes
-        if (header.startsWith("PK")) {
-            return MimeType.APPLICATION_ZIP;
-        } else if (header.startsWith("GIF87a") || header.startsWith("GIF89a")) {
-            return MimeType.IMAGE_GIF;
-        } else if (headerBytes[0] == ((byte) 0xff) && headerBytes[1] == ((byte) 0xd8)) {
-            return MimeType.IMAGE_JPEG;
-        } else if (headerBytes[0] == ((byte) 0x89) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x4e) && headerBytes[3] == ((byte) 0x47) &&
-                headerBytes[4] == ((byte) 0x0d) && headerBytes[5] == ((byte) 0x0a) && headerBytes[6] == ((byte) 0x1a) && headerBytes[7] == ((byte) 0x0a)) {
-            return MimeType.IMAGE_PNG;
-        } else if (headerBytes[0] == ((byte) 0x25) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x44) && headerBytes[3] == ((byte) 0x46)) {
-            return MimeType.APPLICATION_PDF;
-        } else if (headerBytes[0] == ((byte) 0x00) && headerBytes[1] == ((byte) 0x00) && headerBytes[2] == ((byte) 0x00)
-                && (headerBytes[3] == ((byte) 0x14) || headerBytes[3] == ((byte) 0x18) || headerBytes[3] == ((byte) 0x20))
-                && headerBytes[4] == ((byte) 0x66) && headerBytes[5] == ((byte) 0x74) && headerBytes[6] == ((byte) 0x79) && headerBytes[7] == ((byte) 0x70)) {
-            return MimeType.VIDEO_MP4;
-        } else if (headerBytes[0] == ((byte) 0x1a) && headerBytes[1] == ((byte) 0x45) && headerBytes[2] == ((byte) 0xdf) && headerBytes[3] == ((byte) 0xa3)) {
-            return MimeType.VIDEO_WEBM;
-        }
-
-        // Detect by file extension
-        if (name != null) {
-            if (name.endsWith(".txt")) {
-                return MimeType.TEXT_PLAIN;
-            } else if (name.endsWith(".csv")) {
-                return MimeType.TEXT_CSV;
-            }
+        String mimeType = Files.probeContentType(file);
+
+        if (mimeType == null && name != null) {
+            mimeType = URLConnection.getFileNameMap().getContentTypeFor(name);
+        }
+
+        if (mimeType == null) {
+            return MimeType.DEFAULT;
         }
+
+        return mimeType;
     }
 
     /**
      * Get a file extension linked to a MIME type.
      *
@@ -113,52 +67,4 @@ public class MimeTypeUtil {
             return "bin";
         }
     }
-
-    /**
-     * Guess the MIME type of open document formats (docx and odt).
-     * It's more costly than the simple header check, but needed because open document formats
-     * are simple ZIP files on the outside and much bigger on the inside.
-     *
-     * @param mimeType Currently detected MIME type
-     * @param file File on disk
-     * @return MIME type
-     */
-    private static String guessOpenDocumentFormat(String mimeType, Path file) {
-        if (!MimeType.APPLICATION_ZIP.equals(mimeType)) {
-            // open document formats are ZIP files
-            return mimeType;
-        }
-
-        try (InputStream inputStream = Files.newInputStream(file);
-             ZipInputStream zipInputStream = new ZipInputStream(inputStream, Charsets.ISO_8859_1)) {
-            ZipEntry archiveEntry = zipInputStream.getNextEntry();
-            while (archiveEntry != null) {
-                if (archiveEntry.getName().equals("mimetype")) {
-                    // Maybe it's an ODT file
-                    String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
-                    if (MimeType.OPEN_DOCUMENT_TEXT.equals(content.trim())) {
-                        mimeType = MimeType.OPEN_DOCUMENT_TEXT;
-                        break;
-                    }
-                } else if (archiveEntry.getName().equals("[Content_Types].xml")) {
-                    // Maybe it's a DOCX file
-                    String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
-                    if (content.contains(MimeType.OFFICE_DOCUMENT)) {
-                        mimeType = MimeType.OFFICE_DOCUMENT;
-                        break;
-                    } else if (content.contains(MimeType.OFFICE_PRESENTATION)) {
-                        mimeType = MimeType.OFFICE_PRESENTATION;
-                        break;
-                    }
-                }
-
-                archiveEntry = zipInputStream.getNextEntry();
-            }
-        } catch (Exception e) {
-            // In case of any error, just give up and keep the ZIP MIME type
-            return mimeType;
-        }
-
-        return mimeType;
-    }
 }
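The rewritten `guessMimeType` above delegates to the JDK instead of reading magic numbers by hand: `Files.probeContentType` consults the platform's installed `FileTypeDetector` (and may return null), and `URLConnection.getFileNameMap()` provides the extension-based fallback. A small standalone sketch of those two lookups (file name and printed output are illustrative):

```java
import java.io.IOException;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Path;

public class ProbeExample {
    public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("sample", ".csv");

        // Platform-dependent detection; can legitimately return null.
        String probed = Files.probeContentType(file);

        // Extension-based fallback, the same one the new guessMimeType uses.
        String byName = URLConnection.getFileNameMap()
                .getContentTypeFor(file.getFileName().toString());

        System.out.println("probeContentType: " + probed);
        System.out.println("FileNameMap:      " + byName);

        Files.delete(file);
    }
}
```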
|
||||
@@ -15,7 +15,7 @@ import java.nio.file.Paths;
|
||||
*/
|
||||
public class TestMimeTypeUtil {
|
||||
@Test
|
||||
public void guessOpenDocumentFormatTest() throws Exception {
|
||||
public void test() throws Exception {
|
||||
// Detect ODT files
|
||||
Path path = Paths.get(ClassLoader.getSystemResource("file/document.odt").toURI());
|
||||
Assert.assertEquals(MimeType.OPEN_DOCUMENT_TEXT, MimeTypeUtil.guessMimeType(path, "document.odt"));
|
||||
@@ -27,5 +27,45 @@ public class TestMimeTypeUtil {
|
||||
// Detect PPTX files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/apache.pptx").toURI());
|
||||
Assert.assertEquals(MimeType.OFFICE_PRESENTATION, MimeTypeUtil.guessMimeType(path, "apache.pptx"));
|
||||
|
||||
// Detect XLSX files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/document.xlsx").toURI());
|
||||
Assert.assertEquals(MimeType.OFFICE_SHEET, MimeTypeUtil.guessMimeType(path, "document.xlsx"));
|
||||
|
||||
// Detect TXT files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/document.txt").toURI());
|
||||
Assert.assertEquals(MimeType.TEXT_PLAIN, MimeTypeUtil.guessMimeType(path, "document.txt"));
|
||||
|
||||
// Detect CSV files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/document.csv").toURI());
|
||||
Assert.assertEquals(MimeType.TEXT_CSV, MimeTypeUtil.guessMimeType(path, "document.csv"));
|
||||
|
||||
// Detect PDF files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/udhr.pdf").toURI());
|
||||
Assert.assertEquals(MimeType.APPLICATION_PDF, MimeTypeUtil.guessMimeType(path, "udhr.pdf"));
|
||||
|
||||
// Detect JPEG files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/apollo_portrait.jpg").toURI());
|
||||
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(path, "apollo_portrait.jpg"));
|
||||
|
||||
// Detect GIF files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/image.gif").toURI());
|
||||
Assert.assertEquals(MimeType.IMAGE_GIF, MimeTypeUtil.guessMimeType(path, "image.gif"));
|
||||
|
||||
// Detect PNG files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/image.png").toURI());
|
||||
Assert.assertEquals(MimeType.IMAGE_PNG, MimeTypeUtil.guessMimeType(path, "image.png"));
|
||||
|
||||
// Detect ZIP files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/document.zip").toURI());
|
||||
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(path, "document.zip"));
|
||||
|
||||
// Detect WEBM files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/video.webm").toURI());
|
||||
Assert.assertEquals(MimeType.VIDEO_WEBM, MimeTypeUtil.guessMimeType(path, "video.webm"));
|
||||
|
||||
// Detect MP4 files
|
||||
path = Paths.get(ClassLoader.getSystemResource("file/video.mp4").toURI());
|
||||
Assert.assertEquals(MimeType.VIDEO_MP4, MimeTypeUtil.guessMimeType(path, "video.mp4"));
|
||||
}
|
||||
}
|
||||
|
||||
docs-core/src/test/resources/file/document.csv (new file, 2 lines)
@@ -0,0 +1,2 @@
col1,col2
test,me

docs-core/src/test/resources/file/document.txt (new file, 1 line)
@@ -0,0 +1 @@
test me.

docs-core/src/test/resources/file/document.xlsx (new binary file, not shown)
docs-core/src/test/resources/file/document.zip (new binary file, not shown)
docs-core/src/test/resources/file/image.gif (new binary file, not shown; 2.6 KiB)
docs-core/src/test/resources/file/image.png (new binary file, not shown; 4.4 KiB)
docs-core/src/test/resources/file/video.mp4 (new binary file, not shown)
docs-core/src/test/resources/file/video.webm (new binary file, not shown)

@@ -5,7 +5,7 @@
     <parent>
         <groupId>com.sismics.docs</groupId>
         <artifactId>docs-parent</artifactId>
-        <version>1.10</version>
+        <version>1.11</version>
         <relativePath>..</relativePath>
     </parent>
 
RestUtil.java (new file, 40 lines)
@@ -0,0 +1,40 @@
package com.sismics.rest.util;

import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.rest.exception.ServerException;
import com.sismics.util.JsonUtil;

import javax.json.Json;
import javax.json.JsonObjectBuilder;
import java.io.IOException;
import java.nio.file.Files;

/**
 * Rest utilities.
 *
 * @author bgamard
 */
public class RestUtil {
    /**
     * Transform a File into its JSON representation
     * @param fileDb a file
     * @return the JSON
     */
    public static JsonObjectBuilder fileToJsonObjectBuilder(File fileDb) {
        try {
            return Json.createObjectBuilder()
                    .add("id", fileDb.getId())
                    .add("processing", FileUtil.isProcessingFile(fileDb.getId()))
                    .add("name", JsonUtil.nullable(fileDb.getName()))
                    .add("version", fileDb.getVersion())
                    .add("mimetype", fileDb.getMimeType())
                    .add("document_id", JsonUtil.nullable(fileDb.getDocumentId()))
                    .add("create_date", fileDb.getCreateDate().getTime())
                    .add("size", Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId())));
        } catch (IOException e) {
            throw new ServerException("FileError", "Unable to get the size of " + fileDb.getId(), e);
        }
    }
}
@@ -21,6 +21,8 @@ public class ValidationUtil {
 
     private static Pattern ALPHANUMERIC_PATTERN = Pattern.compile("[a-zA-Z0-9_]+");
 
+    private static Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@\\.]+");
+
     /**
      * Checks that the argument is not null.
      *
@@ -152,6 +154,12 @@ public class ValidationUtil {
         }
     }
 
+    public static void validateUsername(String s, String name) throws ClientException {
+        if (!USERNAME_PATTERN.matcher(s).matches()) {
+            throw new ClientException("ValidationError", MessageFormat.format("{0} must have only alphanumeric, underscore characters or @ and .", name));
+        }
+    }
+
     public static void validateRegex(String s, String name, String regex) throws ClientException {
         if (!Pattern.compile(regex).matcher(s).matches()) {
             throw new ClientException("ValidationError", MessageFormat.format("{0} must match {1}", name, regex));
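The new `USERNAME_PATTERN` extends the alphanumeric rule with `@` and `.`, which allows e-mail-style user names. A quick standalone check of what it accepts (sample inputs are illustrative):

```java
import java.util.regex.Pattern;

public class UsernamePatternExample {
    // Same expression as the USERNAME_PATTERN added above.
    private static final Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@\\.]+");

    public static void main(String[] args) {
        System.out.println(USERNAME_PATTERN.matcher("john_doe").matches());              // true
        System.out.println(USERNAME_PATTERN.matcher("john.doe@example.com").matches());  // true, '@' and '.' now allowed
        System.out.println(USERNAME_PATTERN.matcher("john doe").matches());              // false, spaces rejected
    }
}
```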
@@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Objects;

/**
* Base class of integration tests with Jersey.
@@ -33,6 +34,16 @@ import java.util.List;
* @author jtremeaux
*/
public abstract class BaseJerseyTest extends JerseyTest {
protected static final String FILE_APACHE_PPTX = "file/apache.pptx";
protected static final String FILE_DOCUMENT_DOCX = "file/document.docx";
protected static final String FILE_DOCUMENT_ODT = "file/document.odt";
protected static final String FILE_DOCUMENT_TXT = "file/document.txt";
protected static final String FILE_EINSTEIN_ROOSEVELT_LETTER_PNG = "file/Einstein-Roosevelt-letter.png";
protected static final String FILE_PIA_00452_JPG = "file/PIA00452.jpg";
protected static final String FILE_VIDEO_WEBM = "file/video.webm";
protected static final String FILE_WIKIPEDIA_PDF = "file/wikipedia.pdf";
protected static final String FILE_WIKIPEDIA_ZIP = "file/wikipedia.zip";

/**
* Test HTTP server.
*/
@@ -56,7 +67,7 @@ public abstract class BaseJerseyTest extends JerseyTest {
@Override
protected Application configure() {
String travisEnv = System.getenv("TRAVIS");
if (travisEnv == null || !travisEnv.equals("true")) {
if (!Objects.equals(travisEnv, "true")) {
// Travis doesn't like big logs
enable(TestProperties.LOG_TRAFFIC);
enable(TestProperties.DUMP_ENTITY);
@@ -3,6 +3,7 @@ package com.sismics.docs.rest.util;
import com.google.common.io.Resources;
import com.sismics.util.filter.TokenBasedSecurityFilter;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import org.junit.Assert;
@@ -16,6 +17,12 @@ import javax.ws.rs.core.NewCookie;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;

/**
* REST client utilities.
@@ -156,27 +163,58 @@ public class ClientUtil {
return authToken;
}

/**
* Create a document
*
* @param token Authentication token
* @return Document ID
*/
public String createDocument(String token) {
JsonObject json = this.resource.path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
.put(Entity.form(new Form()
.param("title", "Document Title")
.param("description", "Document description")
.param("language", "eng")
.param("create_date", Long.toString(new Date().getTime()))), JsonObject.class);
String documentId = json.getString("id");
Assert.assertNotNull(documentId);
return documentId;
}

/**
* Add a file to a document.
*
* @param file File path
* @param filename Filename
* @param token Authentication token
* @param documentId Document ID
* @return File ID
* @throws IOException e
* @throws URISyntaxException e
*/
public String addFileToDocument(String file, String filename, String token, String documentId) throws IOException {
try (InputStream is = Resources.getResource(file).openStream()) {
public String addFileToDocument(String file, String token, String documentId) throws IOException, URISyntaxException {
URL fileResource = Resources.getResource(file);
Path filePath = Paths.get(fileResource.toURI());
String filename = filePath.getFileName().toString();
try (InputStream is = fileResource.openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, filename);
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = resource
MultiPart formContent;
if (documentId != null) {
formContent = multiPart.field("id", documentId).bodyPart(streamDataBodyPart);
} else {
formContent = multiPart.bodyPart(streamDataBodyPart);
}
JsonObject json = this.resource
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
.put(Entity.entity(multiPart.field("id", documentId).bodyPart(streamDataBodyPart),
.put(Entity.entity(formContent,
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
return json.getString("id");
String fileId = json.getString("id");
Assert.assertNotNull(fileId);
Assert.assertEquals(Files.size(filePath), json.getJsonNumber("size").longValue());
return fileId;
}
}
}
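The call sites updated later in this change set use the reworked helper roughly as follows; this is a sketch that assumes, as the existing tests do, a `clientUtil` field and test users provided by `BaseJerseyTest`:

```java
// Sketch of a test using the new helpers; the filename is now derived from the
// classpath resource and the returned "size" field is asserted inside the helper.
@Test
public void addFileWithDerivedFilename() throws Exception {
    String token = clientUtil.login("document1");   // assumes this user exists in the fixture
    String documentId = clientUtil.createDocument(token);
    String fileId = clientUtil.addFileToDocument(FILE_WIKIPEDIA_PDF, token, documentId);
    Assert.assertNotNull(fileId);
}
```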
@@ -5,7 +5,7 @@
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.10</version>
<version>1.11</version>
<relativePath>..</relativePath>
</parent>

@@ -205,28 +205,28 @@ public class AppResource extends BaseResource {
Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
Config fromConfig = configDao.getById(ConfigType.SMTP_FROM);
JsonObjectBuilder response = Json.createObjectBuilder();
if (System.getenv(Constants.SMTP_HOSTNAME_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_HOSTNAME_ENV))) {
if (hostnameConfig == null) {
response.addNull("hostname");
} else {
response.add("hostname", hostnameConfig.getValue());
}
}
if (System.getenv(Constants.SMTP_PORT_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_PORT_ENV))) {
if (portConfig == null) {
response.addNull("port");
} else {
response.add("port", Integer.valueOf(portConfig.getValue()));
}
}
if (System.getenv(Constants.SMTP_USERNAME_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_USERNAME_ENV))) {
if (usernameConfig == null) {
response.addNull("username");
} else {
response.add("username", usernameConfig.getValue());
}
}
if (System.getenv(Constants.SMTP_PASSWORD_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_PASSWORD_ENV))) {
if (passwordConfig == null) {
response.addNull("password");
} else {
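The switch from `== null` to `Strings.isNullOrEmpty` changes behaviour when the environment variable is present but blank; a quick illustrative sketch (Guava), not taken from the code base:

```java
import com.google.common.base.Strings;

class EnvCheckSketch {
    public static void main(String[] args) {
        String unset = null; // variable not defined at all
        String blank = "";   // variable defined but empty, e.g. an empty SMTP hostname variable

        System.out.println(unset == null);                // true
        System.out.println(blank == null);                // false: the old check treated this as configured
        System.out.println(Strings.isNullOrEmpty(unset)); // true
        System.out.println(Strings.isNullOrEmpty(blank)); // true: the new check falls back to the stored config
    }
}
```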
@@ -396,6 +396,8 @@ public class AppResource extends BaseResource {
* @apiName PostAppConfigInbox
* @apiGroup App
* @apiParam {Boolean} enabled True if the inbox scanning is enabled
* @apiParam {Boolean} autoTagsEnabled If true automatically add tags to document (prefixed by #)
* @apiParam {Boolean} deleteImported If true delete message from mailbox after import
* @apiParam {String} hostname IMAP hostname
* @apiParam {Integer} port IMAP port
* @apiParam {String} username IMAP username
@@ -432,6 +434,8 @@ public class AppResource extends BaseResource {
}
checkBaseFunction(BaseFunction.ADMIN);
ValidationUtil.validateRequired(enabled, "enabled");
ValidationUtil.validateRequired(autoTagsEnabled, "autoTagsEnabled");
ValidationUtil.validateRequired(deleteImported, "deleteImported");
if (!Strings.isNullOrEmpty(portStr)) {
ValidationUtil.validateInteger(portStr, "port");
}
@@ -508,7 +512,7 @@ public class AppResource extends BaseResource {
* @apiSuccess {String} logs.message Message
* @apiError (client) ForbiddenError Access denied
* @apiError (server) ServerError MEMORY appender not configured
* @apiPermission user
* @apiPermission admin
* @apiVersion 1.5.0
*
* @param minLevel Filter on logging level
@@ -529,6 +533,7 @@ public class AppResource extends BaseResource {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);

// Get the memory appender
org.apache.log4j.Logger logger = org.apache.log4j.Logger.getRootLogger();
@@ -665,45 +670,45 @@ public class AppResource extends BaseResource {
log.info("Deleting {} orphan ACLs", q.executeUpdate());

// Soft delete orphan comments
q = em.createNativeQuery("update T_COMMENT c set c.COM_DELETEDATE_D = :dateNow where c.COM_ID_C in (select c.COM_ID_C from T_COMMENT c left join T_DOCUMENT d on d.DOC_ID_C = c.COM_IDDOC_C and d.DOC_DELETEDATE_D is null where d.DOC_ID_C is null)");
q = em.createNativeQuery("update T_COMMENT set COM_DELETEDATE_D = :dateNow where COM_ID_C in (select c.COM_ID_C from T_COMMENT c left join T_DOCUMENT d on d.DOC_ID_C = c.COM_IDDOC_C and d.DOC_DELETEDATE_D is null where d.DOC_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan comments", q.executeUpdate());

// Soft delete orphan document tag links
q = em.createNativeQuery("update T_DOCUMENT_TAG dt set dt.DOT_DELETEDATE_D = :dateNow where dt.DOT_ID_C in (select dt.DOT_ID_C from T_DOCUMENT_TAG dt left join T_DOCUMENT d on dt.DOT_IDDOCUMENT_C = d.DOC_ID_C and d.DOC_DELETEDATE_D is null left join T_TAG t on t.TAG_ID_C = dt.DOT_IDTAG_C and t.TAG_DELETEDATE_D is null where d.DOC_ID_C is null or t.TAG_ID_C is null)");
q = em.createNativeQuery("update T_DOCUMENT_TAG set DOT_DELETEDATE_D = :dateNow where DOT_ID_C in (select dt.DOT_ID_C from T_DOCUMENT_TAG dt left join T_DOCUMENT d on dt.DOT_IDDOCUMENT_C = d.DOC_ID_C and d.DOC_DELETEDATE_D is null left join T_TAG t on t.TAG_ID_C = dt.DOT_IDTAG_C and t.TAG_DELETEDATE_D is null where d.DOC_ID_C is null or t.TAG_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan document tag links", q.executeUpdate());

// Soft delete orphan shares
q = em.createNativeQuery("update T_SHARE s set s.SHA_DELETEDATE_D = :dateNow where s.SHA_ID_C in (select s.SHA_ID_C from T_SHARE s left join T_ACL a on a.ACL_TARGETID_C = s.SHA_ID_C and a.ACL_DELETEDATE_D is null where a.ACL_ID_C is null)");
q = em.createNativeQuery("update T_SHARE set SHA_DELETEDATE_D = :dateNow where SHA_ID_C in (select s.SHA_ID_C from T_SHARE s left join T_ACL a on a.ACL_TARGETID_C = s.SHA_ID_C and a.ACL_DELETEDATE_D is null where a.ACL_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan shares", q.executeUpdate());

// Soft delete orphan tags
q = em.createNativeQuery("update T_TAG t set t.TAG_DELETEDATE_D = :dateNow where t.TAG_ID_C in (select t.TAG_ID_C from T_TAG t left join T_USER u on u.USE_ID_C = t.TAG_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q = em.createNativeQuery("update T_TAG set TAG_DELETEDATE_D = :dateNow where TAG_ID_C in (select t.TAG_ID_C from T_TAG t left join T_USER u on u.USE_ID_C = t.TAG_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan tags", q.executeUpdate());

// Soft delete orphan documents
q = em.createNativeQuery("update T_DOCUMENT d set d.DOC_DELETEDATE_D = :dateNow where d.DOC_ID_C in (select d.DOC_ID_C from T_DOCUMENT d left join T_USER u on u.USE_ID_C = d.DOC_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q = em.createNativeQuery("update T_DOCUMENT set DOC_DELETEDATE_D = :dateNow where DOC_ID_C in (select d.DOC_ID_C from T_DOCUMENT d left join T_USER u on u.USE_ID_C = d.DOC_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan documents", q.executeUpdate());

// Soft delete orphan files
q = em.createNativeQuery("update T_FILE f set f.FIL_DELETEDATE_D = :dateNow where f.FIL_ID_C in (select f.FIL_ID_C from T_FILE f left join T_USER u on u.USE_ID_C = f.FIL_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q = em.createNativeQuery("update T_FILE set FIL_DELETEDATE_D = :dateNow where FIL_ID_C in (select f.FIL_ID_C from T_FILE f left join T_USER u on u.USE_ID_C = f.FIL_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan files", q.executeUpdate());

// Hard delete softly deleted data
log.info("Deleting {} soft deleted document tag links", em.createQuery("delete DocumentTag dt where dt.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted ACLs", em.createQuery("delete Acl a where a.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted shares", em.createQuery("delete Share s where s.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted tags", em.createQuery("delete Tag t where t.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted comments", em.createQuery("delete Comment c where c.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted files", em.createQuery("delete File f where f.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted documents", em.createQuery("delete Document d where d.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted users", em.createQuery("delete User u where u.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted groups", em.createQuery("delete Group g where g.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted document tag links", em.createQuery("delete DocumentTag where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted ACLs", em.createQuery("delete Acl where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted shares", em.createQuery("delete Share where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted tags", em.createQuery("delete Tag where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted comments", em.createQuery("delete Comment where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted files", em.createQuery("delete File where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted documents", em.createQuery("delete Document where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted users", em.createQuery("delete User where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted groups", em.createQuery("delete Group where deleteDate is not null").executeUpdate());

// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
@@ -9,6 +9,7 @@ import com.sismics.util.filter.SecurityFilter;

import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
@@ -22,6 +23,7 @@ import java.util.Set;
* @author jtremeaux
*/
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_JSON)
public abstract class BaseResource {
/**
* @apiDefine admin Admin
@@ -0,0 +1,34 @@
package com.sismics.docs.rest.resource;

import org.glassfish.jersey.message.internal.ReaderWriter;

import javax.json.JsonObject;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;

/**
 * When a JSON-based exception is thrown but a JSON response is not expected,
 * set the media type of the response as plain text.
 */
@Provider
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public class DocsMessageBodyWriter implements MessageBodyWriter<JsonObject> {

    @Override
    public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
        return true;
    }

    @Override
    public void writeTo(JsonObject o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
        ReaderWriter.writeToAsString(o.toString(), entityStream, MediaType.TEXT_PLAIN_TYPE);
    }
}
@@ -27,11 +27,13 @@ import com.sismics.rest.exception.ClientException;
import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.exception.ServerException;
import com.sismics.rest.util.AclUtil;
import com.sismics.rest.util.RestUtil;
import com.sismics.rest.util.ValidationUtil;
import com.sismics.util.EmailUtil;
import com.sismics.util.JsonUtil;
import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.mime.MimeType;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataParam;
@@ -65,6 +67,21 @@ import java.util.*;
*/
@Path("/document")
public class DocumentResource extends BaseResource {

protected static final DateTimeParser YEAR_PARSER = DateTimeFormat.forPattern("yyyy").getParser();
protected static final DateTimeParser MONTH_PARSER = DateTimeFormat.forPattern("yyyy-MM").getParser();
protected static final DateTimeParser DAY_PARSER = DateTimeFormat.forPattern("yyyy-MM-dd").getParser();

private static final DateTimeFormatter DAY_FORMATTER = new DateTimeFormatter(null, DAY_PARSER);
private static final DateTimeFormatter MONTH_FORMATTER = new DateTimeFormatter(null, MONTH_PARSER);
private static final DateTimeFormatter YEAR_FORMATTER = new DateTimeFormatter(null, YEAR_PARSER);

private static final DateTimeParser[] DATE_PARSERS = new DateTimeParser[]{
YEAR_PARSER,
MONTH_PARSER,
DAY_PARSER};
private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder().append( null, DATE_PARSERS).toFormatter();
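The combined Joda-Time formatter above accepts any of the three granularities; a short standalone sketch (illustrative values, not from the tests):

```java
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.joda.time.format.DateTimeFormatterBuilder;
import org.joda.time.format.DateTimeParser;

class DateParserSketch {
    public static void main(String[] args) {
        DateTimeParser[] parsers = {
                DateTimeFormat.forPattern("yyyy").getParser(),
                DateTimeFormat.forPattern("yyyy-MM").getParser(),
                DateTimeFormat.forPattern("yyyy-MM-dd").getParser()};
        DateTimeFormatter formatter = new DateTimeFormatterBuilder().append(null, parsers).toFormatter();

        // All three granularities parse with one formatter; missing fields default to the start of the period.
        System.out.println(formatter.parseDateTime("2015"));       // start of 2015
        System.out.println(formatter.parseDateTime("2015-03"));    // start of March 2015
        System.out.println(formatter.parseDateTime("2015-03-27")); // midnight on 27 March 2015
    }
}
```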
/**
* Returns a document.
*
@@ -73,6 +90,7 @@ public class DocumentResource extends BaseResource {
* @apiGroup Document
* @apiParam {String} id Document ID
* @apiParam {String} share Share ID
* @apiParam {Booleans} files If true includes files information
* @apiSuccess {String} id ID
* @apiSuccess {String} title Title
* @apiSuccess {String} description Description
@@ -119,6 +137,12 @@ public class DocumentResource extends BaseResource {
* @apiSuccess {String} route_step.name Route step name
* @apiSuccess {String="APPROVE", "VALIDATE"} route_step.type Route step type
* @apiSuccess {Boolean} route_step.transitionable True if the route step is actionable by the current user
* @apiSuccess {Object[]} files List of files
* @apiSuccess {String} files.id ID
* @apiSuccess {String} files.name File name
* @apiSuccess {String} files.version Zero-based version number
* @apiSuccess {String} files.mimetype MIME type
* @apiSuccess {String} files.create_date Create date (timestamp)
* @apiError (client) NotFound Document not found
* @apiPermission none
* @apiVersion 1.5.0
@@ -131,7 +155,8 @@ public class DocumentResource extends BaseResource {
@Path("{id: [a-z0-9\-]+}")
public Response get(
@PathParam("id") String documentId,
@QueryParam("share") String shareId) {
@QueryParam("share") String shareId,
@QueryParam("files") Boolean files) {
authenticate();

DocumentDao documentDao = new DocumentDao();
@@ -240,6 +265,19 @@ public class DocumentResource extends BaseResource {
// Add custom metadata
MetadataUtil.addMetadata(document, documentId);

// Add files
if (Boolean.TRUE == files) {
FileDao fileDao = new FileDao();
List<File> fileList = fileDao.getByDocumentsIds(Collections.singleton(documentId));

JsonArrayBuilder filesArrayBuilder = Json.createArrayBuilder();
for (File fileDb : fileList) {
filesArrayBuilder.add(RestUtil.fileToJsonObjectBuilder(fileDb));
}

document.add("files", filesArrayBuilder);
}

return Response.ok().entity(document.build()).build();
}

@@ -326,7 +364,8 @@ public class DocumentResource extends BaseResource {
* @apiParam {String} offset Start at this index
* @apiParam {Number} sort_column Column index to sort on
* @apiParam {Boolean} asc If true, sort in ascending order
* @apiParam {String} search Search query
* @apiParam {String} search Search query (see "Document search syntax" on the top of the page for explanations)
* @apiParam {Booleans} files If true includes files information
* @apiSuccess {Number} total Total number of documents
* @apiSuccess {Object[]} documents List of documents
* @apiSuccess {String} documents.id ID
@@ -345,6 +384,12 @@ public class DocumentResource extends BaseResource {
* @apiSuccess {String} documents.tags.id ID
* @apiSuccess {String} documents.tags.name Name
* @apiSuccess {String} documents.tags.color Color
* @apiSuccess {Object[]} documents.files List of files
* @apiSuccess {String} documents.files.id ID
* @apiSuccess {String} documents.files.name File name
* @apiSuccess {String} documents.files.version Zero-based version number
* @apiSuccess {String} documents.files.mimetype MIME type
* @apiSuccess {String} documents.files.create_date Create date (timestamp)
* @apiSuccess {String[]} suggestions List of search suggestions
* @apiError (client) ForbiddenError Access denied
* @apiError (server) SearchError Error searching in documents
@@ -356,6 +401,7 @@ public class DocumentResource extends BaseResource {
* @param sortColumn Sort column
* @param asc Sorting
* @param search Search query
* @param files Files list
* @return Response
*/
@GET
@@ -365,7 +411,8 @@ public class DocumentResource extends BaseResource {
@QueryParam("offset") Integer offset,
@QueryParam("sort_column") Integer sortColumn,
@QueryParam("asc") Boolean asc,
@QueryParam("search") String search) {
@QueryParam("search") String search,
@QueryParam("files") Boolean files) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
@@ -385,6 +432,14 @@ public class DocumentResource extends BaseResource {
throw new ServerException("SearchError", "Error searching in documents", e);
}

// Find the files of the documents
List<File> filesList = null;
if (Boolean.TRUE == files) {
Iterable<String> documentsIds = CollectionUtils.collect(paginatedList.getResultList(), DocumentDto::getId);
FileDao fileDao = new FileDao();
filesList = fileDao.getByDocumentsIds(documentsIds);
}

for (DocumentDto documentDto : paginatedList.getResultList()) {
// Get tags accessible by the current user on this document
List<TagDto> tagDtoList = tagDao.findByCriteria(new TagCriteria()
@@ -398,7 +453,7 @@ public class DocumentResource extends BaseResource {
.add("color", tagDto.getColor()));
}

documents.add(Json.createObjectBuilder()
JsonObjectBuilder documentObjectBuilder = Json.createObjectBuilder()
.add("id", documentDto.getId())
.add("highlight", JsonUtil.nullable(documentDto.getHighlight()))
.add("file_id", JsonUtil.nullable(documentDto.getFileId()))
@@ -411,7 +466,17 @@ public class DocumentResource extends BaseResource {
.add("active_route", documentDto.isActiveRoute())
.add("current_step_name", JsonUtil.nullable(documentDto.getCurrentStepName()))
.add("file_count", documentDto.getFileCount())
.add("tags", tags));
.add("tags", tags);
if (Boolean.TRUE == files) {
JsonArrayBuilder filesArrayBuilder = Json.createArrayBuilder();
// Find files matching the document
Collection<File> filesOfDocument = CollectionUtils.select(filesList, file -> file.getDocumentId().equals(documentDto.getId()));
for (File fileDb : filesOfDocument) {
filesArrayBuilder.add(RestUtil.fileToJsonObjectBuilder(fileDb));
}
documentObjectBuilder.add("files", filesArrayBuilder);
}
documents.add(documentObjectBuilder);
}

JsonArrayBuilder suggestions = Json.createArrayBuilder();
@@ -442,16 +507,8 @@ public class DocumentResource extends BaseResource {
TagDao tagDao = new TagDao();
List<TagDto> allTagDtoList = tagDao.findByCriteria(new TagCriteria().setTargetIdList(getTargetIdList(null)), null);
UserDao userDao = new UserDao();
DateTimeParser[] parsers = {
DateTimeFormat.forPattern("yyyy").getParser(),
DateTimeFormat.forPattern("yyyy-MM").getParser(),
DateTimeFormat.forPattern("yyyy-MM-dd").getParser() };
DateTimeFormatter yearFormatter = new DateTimeFormatter(null, parsers[0]);
DateTimeFormatter monthFormatter = new DateTimeFormatter(null, parsers[1]);
DateTimeFormatter dayFormatter = new DateTimeFormatter(null, parsers[2]);
DateTimeFormatter formatter = new DateTimeFormatterBuilder().append( null, parsers ).toFormatter();

String[] criteriaList = search.split(" *");
String[] criteriaList = search.split(" +");
List<String> query = new ArrayList<>();
List<String> fullQuery = new ArrayList<>();
for (String criteria : criteriaList) {
@@ -461,20 +518,16 @@ public class DocumentResource extends BaseResource {
fullQuery.add(criteria);
continue;
}
String paramName = params[0];
String paramValue = params[1];

switch (params[0]) {
switch (paramName) {
case "tag":
case "!tag":
// New tag criteria
List<TagDto> tagDtoList = TagUtil.findByName(params[1], allTagDtoList);
if (documentCriteria.getTagIdList() == null) {
documentCriteria.setTagIdList(new ArrayList<>());
}
if (documentCriteria.getExcludedTagIdList() == null) {
documentCriteria.setExcludedTagIdList(new ArrayList<>());
}
List<TagDto> tagDtoList = TagUtil.findByName(paramValue, allTagDtoList);
if (tagDtoList.isEmpty()) {
// No tag found, the request must returns nothing
// No tag found, the request must return nothing
documentCriteria.getTagIdList().add(Lists.newArrayList(UUID.randomUUID().toString()));
} else {
List<String> tagIdList = Lists.newArrayList();
@@ -485,7 +538,7 @@ public class DocumentResource extends BaseResource {
tagIdList.add(childrenTagDto.getId());
}
}
if (params[0].startsWith("!")) {
if (paramName.startsWith("!")) {
documentCriteria.getExcludedTagIdList().add(tagIdList);
} else {
documentCriteria.getTagIdList().add(tagIdList);
@@ -498,9 +551,9 @@ public class DocumentResource extends BaseResource {
case "ubefore":
// New date span criteria
try {
boolean isUpdated = params[0].startsWith("u");
DateTime date = formatter.parseDateTime(params[1]);
if (params[0].endsWith("before")) {
boolean isUpdated = paramName.startsWith("u");
DateTime date = DATE_FORMATTER.parseDateTime(paramValue);
if (paramName.endsWith("before")) {
if (isUpdated) documentCriteria.setUpdateDateMax(date.toDate());
else documentCriteria.setCreateDateMax(date.toDate());
} else {
@@ -516,11 +569,11 @@ public class DocumentResource extends BaseResource {
case "uat":
case "at":
// New specific date criteria
try {
boolean isUpdated = params[0].startsWith("u");
switch (params[1].length()) {
try {
switch (paramValue.length()) {
case 10: {
DateTime date = dayFormatter.parseDateTime(params[1]);
DateTime date = DATE_FORMATTER.parseDateTime(params[1]);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusDays(1).minusSeconds(1).toDate());
@@ -531,7 +584,7 @@ public class DocumentResource extends BaseResource {
break;
}
case 7: {
DateTime date = monthFormatter.parseDateTime(params[1]);
DateTime date = MONTH_FORMATTER.parseDateTime(params[1]);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusMonths(1).minusSeconds(1).toDate());
@@ -542,7 +595,7 @@ public class DocumentResource extends BaseResource {
break;
}
case 4: {
DateTime date = yearFormatter.parseDateTime(params[1]);
DateTime date = YEAR_FORMATTER.parseDateTime(params[1]);
if (isUpdated) {
documentCriteria.setUpdateDateMin(date.toDate());
documentCriteria.setUpdateDateMax(date.plusYears(1).minusSeconds(1).toDate());
@@ -551,6 +604,10 @@ public class DocumentResource extends BaseResource {
documentCriteria.setCreateDateMax(date.plusYears(1).minusSeconds(1).toDate());
}
break;
} default: {
// Invalid format, returns no documents
documentCriteria.setCreateDateMin(new Date(0));
documentCriteria.setCreateDateMax(new Date(0));
}
}
} catch (IllegalArgumentException e) {
@@ -561,25 +618,26 @@ public class DocumentResource extends BaseResource {
break;
case "shared":
// New shared state criteria
documentCriteria.setShared(params[1].equals("yes"));
documentCriteria.setShared(paramValue.equals("yes"));
break;
case "lang":
// New language criteria
if (Constants.SUPPORTED_LANGUAGES.contains(params[1])) {
documentCriteria.setLanguage(params[1]);
if (Constants.SUPPORTED_LANGUAGES.contains(paramValue)) {
documentCriteria.setLanguage(paramValue);
} else {
// Unsupported language, returns no documents
documentCriteria.setLanguage(UUID.randomUUID().toString());
}
break;
case "mime":
// New mime type criteria
documentCriteria.setMimeType(params[1]);
documentCriteria.setMimeType(paramValue);
break;
case "by":
// New creator criteria
User user = userDao.getActiveByUsername(params[1]);
User user = userDao.getActiveByUsername(paramValue);
if (user == null) {
// This user doesn't exists, return nothing
// This user doesn't exist, return nothing
documentCriteria.setCreatorId(UUID.randomUUID().toString());
} else {
// This user exists, search its documents
@@ -588,19 +646,19 @@ public class DocumentResource extends BaseResource {
break;
case "workflow":
// New shared state criteria
documentCriteria.setActiveRoute(params[1].equals("me"));
documentCriteria.setActiveRoute(paramValue.equals("me"));
break;
case "simple":
// New simple search criteria
query.add(params[1]);
query.add(paramValue);
break;
case "full":
// New fulltext search criteria
fullQuery.add(params[1]);
fullQuery.add(paramValue);
break;
case "title":
// New title criteria
documentCriteria.setTitle(params[1]);
documentCriteria.setTitle(paramValue);
break;
default:
fullQuery.add(criteria);

@@ -21,6 +21,7 @@ import com.sismics.docs.core.util.FileUtil;
import com.sismics.rest.exception.ClientException;
import com.sismics.rest.exception.ForbiddenClientException;
import com.sismics.rest.exception.ServerException;
import com.sismics.rest.util.RestUtil;
import com.sismics.rest.util.ValidationUtil;
import com.sismics.util.HttpUtil;
import com.sismics.util.JsonUtil;
@@ -42,6 +43,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
@@ -111,10 +113,12 @@ public class FileResource extends BaseResource {
}

// Keep unencrypted data temporary on disk
String name = fileBodyPart.getContentDisposition() != null ?
URLDecoder.decode(fileBodyPart.getContentDisposition().getFileName(), StandardCharsets.UTF_8) : null;
java.nio.file.Path unencryptedFile;
long fileSize;
try {
unencryptedFile = AppContext.getInstance().getFileService().createTemporaryFile();
unencryptedFile = AppContext.getInstance().getFileService().createTemporaryFile(name);
Files.copy(fileBodyPart.getValueAs(InputStream.class), unencryptedFile, StandardCopyOption.REPLACE_EXISTING);
fileSize = Files.size(unencryptedFile);
} catch (IOException e) {
@@ -122,8 +126,6 @@ public class FileResource extends BaseResource {
}

try {
String name = fileBodyPart.getContentDisposition() != null ?
URLDecoder.decode(fileBodyPart.getContentDisposition().getFileName(), "UTF-8") : null;
String fileId = FileUtil.createFile(name, previousFileId, unencryptedFile, fileSize, documentDto == null ?
null : documentDto.getLanguage(), principal.getId(), documentId);

@@ -427,27 +429,13 @@ public class FileResource extends BaseResource {
}

FileDao fileDao = new FileDao();
List<File> fileList = fileDao.getByDocumentId(principal.getId(), documentId);

JsonArrayBuilder files = Json.createArrayBuilder();
for (File fileDb : fileList) {
try {
files.add(Json.createObjectBuilder()
.add("id", fileDb.getId())
.add("processing", FileUtil.isProcessingFile(fileDb.getId()))
.add("name", JsonUtil.nullable(fileDb.getName()))
.add("version", fileDb.getVersion())
.add("mimetype", fileDb.getMimeType())
.add("document_id", JsonUtil.nullable(fileDb.getDocumentId()))
.add("create_date", fileDb.getCreateDate().getTime())
.add("size", Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId()))));
} catch (IOException e) {
throw new ServerException("FileError", "Unable to get the size of " + fileDb.getId(), e);
for (File fileDb : fileDao.getByDocumentId(principal.getId(), documentId)) {
files.add(RestUtil.fileToJsonObjectBuilder(fileDb));
}
}

JsonObjectBuilder response = Json.createObjectBuilder()
.add("files", files);

return Response.ok().entity(response.build()).build();
}

@@ -587,6 +575,7 @@ public class FileResource extends BaseResource {
*/
@GET
@Path("{id: [a-z0-9\-]+}/data")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public Response data(
@PathParam("id") final String fileId,
@QueryParam("share") String shareId,
@@ -676,23 +665,24 @@ public class FileResource extends BaseResource {
/**
* Returns all files from a document, zipped.
*
* @api {get} /file/zip Get zipped files
* @api {get} /file/zip Returns all files from a document, zipped.
* @apiName GetFileZip
* @apiGroup File
* @apiParam {String} id Document ID
* @apiParam {String} share Share ID
* @apiSuccess {Object} file The ZIP file is the whole response
* @apiError (client) NotFound Document not found
* @apiError (client) NotFoundException Document not found
* @apiError (server) InternalServerError Error creating the ZIP file
* @apiPermission none
* @apiVersion 1.5.0
*
* @param documentId Document ID
* @param shareId Share ID
* @return Response
*/
@GET
@Path("zip")
@Produces(MediaType.APPLICATION_OCTET_STREAM)
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.TEXT_PLAIN})
public Response zip(
@QueryParam("id") String documentId,
@QueryParam("share") String shareId) {
@@ -705,10 +695,44 @@ public class FileResource extends BaseResource {
throw new NotFoundException();
}

// Get files and user associated with this document
// Get files associated with this document
FileDao fileDao = new FileDao();
final UserDao userDao = new UserDao();
final List<File> fileList = fileDao.getByDocumentId(principal.getId(), documentId);
String zipFileName = documentDto.getTitle().replaceAll("\\W+", "_");
return sendZippedFiles(zipFileName, fileList);
}

/**
* Returns a list of files, zipped
*
* @api {post} /file/zip Returns a list of files, zipped
* @apiName GetFilesZip
* @apiGroup File
* @apiParam {String[]} files IDs
* @apiSuccess {Object} file The ZIP file is the whole response
* @apiError (client) NotFoundException Files not found
* @apiError (server) InternalServerError Error creating the ZIP file
* @apiPermission none
* @apiVersion 1.11.0
*
* @param filesIdsList Files IDs
* @return Response
*/
@POST
@Path("zip")
@Produces({MediaType.APPLICATION_OCTET_STREAM, MediaType.TEXT_PLAIN})
public Response zip(
@FormParam("files") List<String> filesIdsList) {
authenticate();
List<File> fileList = findFiles(filesIdsList);
return sendZippedFiles("files", fileList);
}

/**
* Sent the content of a list of files.
*/
private Response sendZippedFiles(String zipFileName, List<File> fileList) {
final UserDao userDao = new UserDao();

// Create the ZIP stream
StreamingOutput stream = outputStream -> {
@@ -739,7 +763,7 @@ public class FileResource extends BaseResource {
// Write to the output
return Response.ok(stream)
.header("Content-Type", "application/zip")
.header("Content-Disposition", "attachment; filename=\"" + documentDto.getTitle().replaceAll("\\W+", "_") + ".zip\"")
.header("Content-Disposition", "attachment; filename=\"" + zipFileName + ".zip\"")
.build();
}

@@ -756,7 +780,32 @@ public class FileResource extends BaseResource {
if (file == null) {
throw new NotFoundException();
}
checkFileAccessible(shareId, file);
return file;
}

/**
* Find a list of files with access rights checking.
*
* @param filesIds Files IDs
* @return List<File>
*/
private List<File> findFiles(List<String> filesIds) {
FileDao fileDao = new FileDao();
List<File> files = fileDao.getFiles(filesIds);
for (File file : files) {
checkFileAccessible(null, file);
}
return files;
}

/**
* Check if a file is accessible to the current user
* @param shareId Share ID
* @param file
*/
private void checkFileAccessible(String shareId, File file) {
if (file.getDocumentId() == null) {
// It's an orphan file
if (!file.getUserId().equals(principal.getId())) {
@@ -770,6 +819,5 @@ public class FileResource extends BaseResource {
throw new ForbiddenClientException();
}
}
return file;
}
}

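A sketch (not part of the commit) of how an integration test could exercise the new `POST /file/zip` endpoint, in the style of the other tests in this change set; repeated `files` form parameters bind to the `List<String>` parameter:

```java
// Assumes a BaseJerseyTest subclass where file1Id/file2Id were created earlier and token is a valid auth token.
@Test
public void zipSelectedFiles() {
    Form form = new Form()
            .param("files", file1Id)
            .param("files", file2Id);
    Response response = target().path("/file/zip").request()
            .cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
            .post(Entity.form(form));
    Assert.assertEquals(Status.OK.getStatusCode(), response.getStatus());
    Assert.assertEquals("application/zip", response.getHeaderString("Content-Type"));
}
```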
@@ -313,7 +313,7 @@ public class GroupResource extends BaseResource {
* @return Response
*/
@DELETE
@Path("{groupName: [a-zA-Z0-9_]+}/{username: [a-zA-Z0-9_]+}")
@Path("{groupName: [a-zA-Z0-9_]+}/{username: [a-zA-Z0-9_@\\.]+}")
public Response removeMember(@PathParam("groupName") String groupName,
@PathParam("username") String username) {
if (!authenticate()) {

@@ -88,7 +88,7 @@ public class UserResource extends BaseResource {

// Validate the input data
username = ValidationUtil.validateLength(username, "username", 3, 50);
ValidationUtil.validateAlphanumeric(username, "username");
ValidationUtil.validateUsername(username, "username");
password = ValidationUtil.validateLength(password, "password", 8, 50);
email = ValidationUtil.validateLength(email, "email", 1, 100);
Long storageQuota = ValidationUtil.validateLong(storageQuotaStr, "storage_quota");
@@ -195,7 +195,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@POST
@Path("{username: [a-zA-Z0-9_]+}")
@Path("{username: [a-zA-Z0-9_@\\.]+}")
public Response update(
@PathParam("username") String username,
@FormParam("password") String password,
@@ -511,7 +511,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@DELETE
@Path("{username: [a-zA-Z0-9_]+}")
@Path("{username: [a-zA-Z0-9_@\\.]+}")
public Response delete(@PathParam("username") String username) {
if (!authenticate()) {
throw new ForbiddenClientException();
@@ -591,7 +591,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@POST
@Path("{username: [a-zA-Z0-9_]+}/disable_totp")
@Path("{username: [a-zA-Z0-9_@\\.]+}/disable_totp")
public Response disableTotpUsername(@PathParam("username") String username) {
if (!authenticate()) {
throw new ForbiddenClientException();
@@ -713,7 +713,7 @@ public class UserResource extends BaseResource {
* @return Response
*/
@GET
@Path("{username: [a-zA-Z0-9_]+}")
@Path("{username: [a-zA-Z0-9_@\\.]+}")
@Produces(MediaType.APPLICATION_JSON)
public Response view(@PathParam("username") String username) {
if (!authenticate()) {
@@ -1064,7 +1064,6 @@ public class UserResource extends BaseResource {
* @apiGroup User
* @apiParam {String} username Username
* @apiSuccess {String} status Status OK
* @apiError (client) UserNotFound The user is not found
* @apiError (client) ValidationError Validation error
* @apiPermission none
* @apiVersion 1.5.0
@@ -1081,11 +1080,16 @@ public class UserResource extends BaseResource {
// Validate input data
ValidationUtil.validateStringNotBlank("username", username);

// Prepare response
Response response = Response.ok().entity(Json.createObjectBuilder()
.add("status", "ok")
.build()).build();

// Check for user existence
UserDao userDao = new UserDao();
List<UserDto> userDtoList = userDao.findByCriteria(new UserCriteria().setUserName(username), null);
if (userDtoList.isEmpty()) {
throw new ClientException("UserNotFound", "User not found: " + username);
return response;
}
UserDto user = userDtoList.get(0);

@@ -1102,9 +1106,7 @@ public class UserResource extends BaseResource {
AppContext.getInstance().getMailEventBus().post(passwordLostEvent);

// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");
return Response.ok().entity(response.build()).build();
return response;
}

/**

@@ -10,7 +10,7 @@ The base URL depends on your server. If your instance of Teedy is accessible thr
`https://teedy.mycompany.com`, then the base API URL is `https://teedy.mycompany.com/api`.

## Verbs and status codes
The API uses restful verbs.
The API uses RESTful verbs.

| Verb | Description |
|---|---|
@@ -47,3 +47,42 @@ A call to this API with a given `auth_token` cookie will make it unusable for ot
```
curl -i -X POST -H "Cookie: auth_token=64085630-2ae6-415c-9a92-4b22c107eaa4" https://docs.mycompany.com/api/user/logout
```

## Document search syntax

The `/api/document/list` endpoint uses a String `search` parameter.

This parameter is split into segments using the space character (the other whitespace characters are not considered).

If a segment contains exactly one colon (`:`), it will be used as a field criterion (see below).
In other cases (zero or more than one colon), the segment will be used as a search criterion for all fields, including the document's files content.

### Search fields

If a search `VALUE` is considered invalid, the search result will be empty. An example request combining several criteria is shown after the list below.

* Content
* `full:VALUE`: `VALUE` is used as a search criterion for all fields, including the document's files content
* `simple:VALUE`: `VALUE` is used as a search criterion for all fields except the document's files content
* Date
* `after:VALUE`: the document must have been created after or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `at:VALUE`: the document must have been created at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `before:VALUE`: the document must have been created before or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `uafter:VALUE`: the document must have been last updated after or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* `uat:VALUE`: the document must have been updated at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd` (for `yyyy` it must be the same year, for `yyyy-MM` the same month, for `yyyy-MM-dd` the same day)
* `ubefore:VALUE`: the document must have been updated before or at the `VALUE` moment, accepted formats are `yyyy`, `yyyy-MM` and `yyyy-MM-dd`
* Language
* `lang:VALUE`: the document must be of the specified language (example: `en`)
* Mime
* `mime:VALUE`: the document must be of the specified mime type (example: `image/png`)
* Shared
* `shared:VALUE`: if `VALUE` is `yes` the document must be shared, for other `VALUE`s the criterion is ignored
* Tags
* `tag:VALUE`: the document must contain a tag or a child of a tag that starts with `VALUE`, case is ignored
* `!tag:VALUE`: the document must not contain a tag or a child of a tag that starts with `VALUE`, case is ignored
* Title
* `title:VALUE`: the title of the document must be `VALUE`
* User
* `by:VALUE`: the document creator's username must be `VALUE` with an exact match, the user must not be deleted
* Workflow
* `workflow:VALUE`: if `VALUE` is `me` the document must have an active route, for other `VALUE`s the criterion is ignored

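As an illustration of the syntax above (not part of the original page), a Jersey-client call in the style of the project's integration tests, assuming a JerseyTest-style `target()` and an authenticated token; the query combines three field criteria with one free-text term:

```java
// "tag:invoice by:admin after:2020 report" restricts by tag, creator and creation date,
// and searches "report" across all fields including file content.
JsonObject json = target().path("/document/list")
        .queryParam("search", "tag:invoice by:admin after:2020 report")
        .request()
        .cookie(TokenBasedSecurityFilter.COOKIE_NAME, authToken)
        .get(JsonObject.class);
System.out.println(json.getJsonNumber("total"));
```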
@@ -545,7 +545,8 @@ angular.module('docs',
{ key: 'lav', label: 'Latviešu' },
{ key: 'dan', label: 'Dansk' },
{ key: 'nor', label: 'Norsk' },
{ key: 'vie', label: 'Tiếng Việt' }
{ key: 'vie', label: 'Tiếng Việt' },
{ key: 'ces', label: 'Czech' }
];
})
/**

@@ -25,7 +25,7 @@
"message": "Please enter a new password",
"submit": "Change my password",
"error_title": "Error changing your password",
"error_message": "Your password recovery request is expired, please ask a new one on the login page"
"error_message": "Your password recovery request is expired, please ask for a new one on the login page"
},
"index": {
"toggle_navigation": "Toggle navigation",
@@ -360,7 +360,7 @@
"message_2": "Those applications automatically generate a validation code that changes after a certain period of time.<br/>You will be required to enter this validation code each time you login on <strong>{{ appName }}</strong>.",
"secret_key": "Your secret key is: <strong>{{ secret }}</strong>",
"secret_key_warning": "Configure your TOTP app on your phone with this secret key now, you will not be able to access it later.",
"totp_enabled_message": "Two-factor authentication is enabled on your account.<br/>Each time you login on <strong>{{ appName }}</strong>, you will be asked a validation code from your configured phone app.<br/>If you lose your phone, you will not be able to login into your account but active sessions will allow you to regenerate a secrey key.",
"totp_enabled_message": "Two-factor authentication is enabled on your account.<br/>Each time you login on <strong>{{ appName }}</strong>, you will be asked for a validation code from your configured phone app.<br/>If you lose your phone, you will not be able to login into your account but active sessions will allow you to regenerate a secrey key.",
"disable_totp": {
"disable_totp": "Disable two-factor authentication",
"message": "Your account will not be protected by the two-factor authentication anymore.",
@@ -509,7 +509,7 @@
"error_general": "An error occurred while trying to import your file, please make sure it is a valid EML file"
},
"app_share": {
"main": "Ask a shared document link to access it",
"main": "Ask for a shared document link to access it",
"403": {
"title": "Not authorized",
"message": "The document you are trying to view is not shared anymore"

@@ -41,8 +41,8 @@
img-error="error = true"
ng-show="!error && canDisplayPreview()" />

<!-- Video player -->
<a href class="video-overlay" ng-if="!error && file.mimetype.substring(0, 6) == 'video/'"
<!-- Media player -->
<a href class="video-overlay" ng-if="!error && (file.mimetype.substring(0, 6) == 'video/' || file.mimetype.substring(0, 6) == 'audio/')"
ng-init="videoPlayer = false" ng-click="videoPlayer = true">
<span class="fas fa-play-circle" ng-if="!videoPlayer"></span>
<video ng-if="videoPlayer" autoplay="autoplay" loop="loop"

@@ -9,7 +9,7 @@
<label class="col-sm-2 control-label" for="inputUserUsername">{{ 'settings.user.edit.username' | translate }}</label>
<div class="col-sm-7">
<input name="userUsername" type="text" id="inputUserUsername" required ng-disabled="isEdit()" class="form-control"
ng-pattern="/^[a-zA-Z0-9_]*$/"
ng-pattern="/^[a-zA-Z0-9_@\.]*$/"
ng-minlength="3" ng-maxlength="50" ng-attr-placeholder="{{ 'settings.user.edit.username' | translate }}" ng-model="user.username"/>
</div>

@@ -99,7 +99,7 @@ public class TestAuditLogResource extends BaseJerseyTest {
long update1Date = json.getJsonNumber("update_date").longValue();

// Add a file to the document
clientUtil.addFileToDocument("file/wikipedia.pdf", "wikipedia.pdf", auditlog1Token, document1Id);
clientUtil.addFileToDocument(FILE_WIKIPEDIA_PDF, auditlog1Token, document1Id);

// Get document 1
json = target().path("/document/" + document1Id).request()

@@ -4,8 +4,6 @@ import com.google.common.io.ByteStreams;
|
||||
import com.google.common.io.Resources;
|
||||
import com.sismics.docs.core.util.DirectoryUtil;
|
||||
import com.sismics.util.filter.TokenBasedSecurityFilter;
|
||||
import com.sismics.util.mime.MimeType;
|
||||
import com.sismics.util.mime.MimeTypeUtil;
|
||||
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
|
||||
import org.glassfish.jersey.media.multipart.MultiPartFeature;
|
||||
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
|
||||
@@ -96,8 +94,8 @@ public class TestDocumentResource extends BaseJerseyTest {
|
||||
Assert.assertNotNull(document2Id);
|
||||
|
||||
// Add a file
|
||||
String file1Id = clientUtil.addFileToDocument("file/Einstein-Roosevelt-letter.png",
|
||||
"Einstein-Roosevelt-letter.png", document1Token, document1Id);
|
||||
String file1Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG,
|
||||
document1Token, document1Id);
|
||||
|
||||
// Share this document
|
||||
target().path("/share").request()
|
||||
@@ -151,8 +149,8 @@ public class TestDocumentResource extends BaseJerseyTest {
|
||||
Assert.assertNotNull(document3Id);
|
||||
|
||||
// Add a file
|
||||
clientUtil.addFileToDocument("file/Einstein-Roosevelt-letter.png",
|
||||
"Einstein-Roosevelt-letter.png", document3Token, document3Id);
clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, document3Token, document3Id);

// List all documents from document3
json = target().path("/document/list")
@@ -264,6 +262,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(document2Id, relations.getJsonObject(0).getString("id"));
Assert.assertFalse(relations.getJsonObject(0).getBoolean("source"));
Assert.assertEquals("My super title document 2", relations.getJsonObject(0).getString("title"));
Assert.assertFalse(json.containsKey("files"));

// Get document 2
json = target().path("/document/" + document2Id).request()
@@ -275,6 +274,7 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals(document1Id, relations.getJsonObject(0).getString("id"));
Assert.assertTrue(relations.getJsonObject(0).getBoolean("source"));
Assert.assertEquals("My super title document 1", relations.getJsonObject(0).getString("title"));
Assert.assertFalse(json.containsKey("files"));

// Create a tag
json = target().path("/tag").request()
@@ -330,6 +330,25 @@ public class TestDocumentResource extends BaseJerseyTest {
.get(JsonObject.class);
documents = json.getJsonArray("documents");
Assert.assertEquals(1, documents.size());
Assert.assertEquals(document1Id, documents.getJsonObject(0).getString("id"));
Assert.assertFalse(documents.getJsonObject(0).containsKey("files"));

// Search documents by query with files
json = target().path("/document/list")
.queryParam("files", true)
.queryParam("search", "new")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.get(JsonObject.class);
documents = json.getJsonArray("documents");
Assert.assertEquals(1, documents.size());
Assert.assertEquals(1, documents.size());
Assert.assertEquals(document1Id, documents.getJsonObject(0).getString("id"));
JsonArray files = documents.getJsonObject(0).getJsonArray("files");
Assert.assertEquals(1, files.size());
Assert.assertEquals(file1Id, files.getJsonObject(0).getString("id"));
Assert.assertEquals("Einstein-Roosevelt-letter.png", files.getJsonObject(0).getString("name"));
Assert.assertEquals("image/png", files.getJsonObject(0).getString("mimetype"));

// Get document 1
json = target().path("/document/" + document1Id).request()
@@ -353,6 +372,19 @@ public class TestDocumentResource extends BaseJerseyTest {
Assert.assertEquals("document1", contributors.getJsonObject(0).getString("username"));
relations = json.getJsonArray("relations");
Assert.assertEquals(0, relations.size());
Assert.assertFalse(json.containsKey("files"));

// Get document 1 with its files
json = target().path("/document/" + document1Id)
.queryParam("files", true)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.get(JsonObject.class);
files = json.getJsonArray("files");
Assert.assertEquals(1, files.size());
Assert.assertEquals(file1Id, files.getJsonObject(0).getString("id"));
Assert.assertEquals("Einstein-Roosevelt-letter.png", files.getJsonObject(0).getString("name"));
Assert.assertEquals("image/png", files.getJsonObject(0).getString("mimetype"));

// Get document 2
json = target().path("/document/" + document1Id).request()
@@ -367,6 +399,12 @@ public class TestDocumentResource extends BaseJerseyTest {
.delete(JsonObject.class);
Assert.assertEquals("ok", json.getString("status"));

// Deletes a non-existing document
response = target().path("/document/69b79238-84bb-4263-a32f-9cbdf8c92188").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, document1Token)
.delete();
Assert.assertEquals(Status.NOT_FOUND, Status.fromStatusCode(response.getStatus()));

// Check that the associated files are deleted from FS
java.io.File storedFile = DirectoryUtil.getStorageDirectory().resolve(file1Id).toFile();
java.io.File webFile = DirectoryUtil.getStorageDirectory().resolve(file1Id + "_web").toFile();
@@ -410,22 +448,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentOdtToken = clientUtil.login("document_odt");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentOdtToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(documentOdtToken);

// Add a PDF file
String file1Id = clientUtil.addFileToDocument("file/document.odt", "document.odt", documentOdtToken, document1Id);
String file1Id = clientUtil.addFileToDocument(FILE_DOCUMENT_ODT, documentOdtToken, document1Id);

// Search documents by query in full content
json = target().path("/document/list")
JsonObject json = target().path("/document/list")
.queryParam("search", "full:ipsum")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentOdtToken)
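
The hunks above replace the hand-rolled document-creation and multipart-upload boilerplate with shared ClientUtil helpers (createDocument, plus a shorter addFileToDocument overload keyed by a FILE_* resource constant). Only the call sites appear in this diff, so the following is a minimal sketch of what such helpers could look like, inferred from how they are used here; the method bodies, the WebTarget wiring, and the constant value are assumptions, not the repository's actual ClientUtil code.

import java.io.InputStream;
import java.nio.file.Paths;
import javax.json.JsonObject;
import javax.ws.rs.client.Entity;
import javax.ws.rs.client.WebTarget;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.MediaType;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import com.google.common.io.Resources;
// TokenBasedSecurityFilter.COOKIE_NAME is the project's auth cookie constant, as used in the tests above.

public class ClientUtil {
    // Assumed constant: a classpath test resource, e.g. FILE_DOCUMENT_ODT -> "file/document.odt".
    public static final String FILE_DOCUMENT_ODT = "file/document.odt";

    private final WebTarget resource;

    public ClientUtil(WebTarget resource) {
        this.resource = resource;
    }

    // Creates a minimal document with the given auth token and returns its id.
    public String createDocument(String token) {
        JsonObject json = resource.path("/document").request()
                .cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
                .put(Entity.form(new Form()
                        .param("title", "My super title document 1")
                        .param("language", "eng")), JsonObject.class);
        return json.getString("id");
    }

    // Uploads a classpath resource as a file; documentId may be null to create an orphan file.
    public String addFileToDocument(String resourcePath, String token, String documentId) throws Exception {
        try (InputStream is = Resources.getResource(resourcePath).openStream();
             FormDataMultiPart multiPart = new FormDataMultiPart()) {
            String fileName = Paths.get(resourcePath).getFileName().toString();
            StreamDataBodyPart filePart = new StreamDataBodyPart("file", is, fileName);
            if (documentId != null) {
                multiPart.field("id", documentId);
            }
            JsonObject json = resource.register(MultiPartFeature.class)
                    .path("/file").request()
                    .cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
                    .put(Entity.entity(multiPart.bodyPart(filePart),
                            MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
            return json.getString("id");
        }
    }
}
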
@@ -441,7 +470,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));

// Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf")
@@ -470,22 +498,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentDocxToken = clientUtil.login("document_docx");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentDocxToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(documentDocxToken);

// Add a PDF file
String file1Id = clientUtil.addFileToDocument("file/document.docx", "document.docx", documentDocxToken, document1Id);
String file1Id = clientUtil.addFileToDocument(FILE_DOCUMENT_DOCX, documentDocxToken, document1Id);

// Search documents by query in full content
json = target().path("/document/list")
JsonObject json = target().path("/document/list")
.queryParam("search", "full:dolor")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentDocxToken)
@@ -501,7 +520,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));

// Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf")
@@ -530,22 +548,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentPdfToken = clientUtil.login("document_pdf");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPdfToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(documentPdfToken);

// Add a PDF file
String file1Id = clientUtil.addFileToDocument("file/wikipedia.pdf", "wikipedia.pdf", documentPdfToken, document1Id);
String file1Id = clientUtil.addFileToDocument(FILE_WIKIPEDIA_PDF, documentPdfToken, document1Id);

// Search documents by query in full content
json = target().path("/document/list")
JsonObject json = target().path("/document/list")
.queryParam("search", "full:vrandecic")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPdfToken)
@@ -561,7 +570,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));

// Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf")
@@ -590,22 +598,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentPlainToken = clientUtil.login("document_plain");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPlainToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(documentPlainToken);

// Add a plain text file
String file1Id = clientUtil.addFileToDocument("file/document.txt", "document.txt", documentPlainToken, document1Id);
String file1Id = clientUtil.addFileToDocument(FILE_DOCUMENT_TXT, documentPlainToken, document1Id);

// Search documents by query in full content
json = target().path("/document/list")
JsonObject json = target().path("/document/list")
.queryParam("search", "full:love")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPlainToken)
@@ -621,7 +620,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));

// Get the content data
response = target().path("/file/" + file1Id + "/data")
@@ -660,22 +658,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentVideoToken = clientUtil.login("document_video");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentVideoToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(documentVideoToken);

// Add a video file
String file1Id = clientUtil.addFileToDocument("file/video.webm", "video.webm", documentVideoToken, document1Id);
String file1Id = clientUtil.addFileToDocument(FILE_VIDEO_WEBM, documentVideoToken, document1Id);

// Search documents by query in full content
json = target().path("/document/list")
JsonObject json = target().path("/document/list")
.queryParam("search", "full:vp9")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentVideoToken)
@@ -691,7 +680,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));

// Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf")
@@ -720,22 +708,13 @@ public class TestDocumentResource extends BaseJerseyTest {
String documentPptxToken = clientUtil.login("document_pptx");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPptxToken)
.put(Entity.form(new Form()
.param("title", "My super title document 1")
.param("description", "My super description for document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(documentPptxToken);

// Add a PPTX file
String file1Id = clientUtil.addFileToDocument("file/apache.pptx", "apache.pptx", documentPptxToken, document1Id);
String file1Id = clientUtil.addFileToDocument(FILE_APACHE_PPTX, documentPptxToken, document1Id);

// Search documents by query in full content
json = target().path("/document/list")
JsonObject json = target().path("/document/list")
.queryParam("search", "full:scaling")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, documentPptxToken)
@@ -751,7 +730,6 @@ public class TestDocumentResource extends BaseJerseyTest {
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertTrue(fileBytes.length > 0); // Images rendered from PDF differ in size from OS to OS due to font issues
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));

// Export a document in PDF format
response = target().path("/document/" + document1Id + "/pdf")

@@ -23,6 +23,7 @@ import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Date;
import java.util.zip.ZipInputStream;

/**
* Exhaustive test of the file resource.
@@ -37,53 +38,18 @@ public class TestFileResource extends BaseJerseyTest {
*/
@Test
public void testFileResource() throws Exception {
// Login file1
clientUtil.createUser("file1");
String file1Token = clientUtil.login("file1");
// Login file_resources
clientUtil.createUser("file_resources");
String file1Token = clientUtil.login("file_resources");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.put(Entity.form(new Form()
.param("title", "File test document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(file1Token);

// Add a file
String file1Id;
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
Assert.assertEquals(163510L, json.getJsonNumber("size").longValue());
}
}
String file1Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, file1Token, document1Id);

// Add a file
String file2Id;
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file2Id = json.getString("id");
Assert.assertNotNull(file2Id);
}
}
String file2Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, file1Token, document1Id);

// Get the file data
Response response = target().path("/file/" + file1Id + "/data").request()
@@ -91,7 +57,6 @@ public class TestFileResource extends BaseJerseyTest {
.get();
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0);

// Get the thumbnail data
@@ -103,7 +68,6 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0);

// Get the content data
@@ -123,7 +87,6 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0);

// Check that the files are not readable directly from FS
@@ -131,7 +94,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(MimeType.DEFAULT, MimeTypeUtil.guessMimeType(storedFile, null));

// Get all files from a document
json = target().path("/file/list")
JsonObject json = target().path("/file/list")
.queryParam("id", document1Id)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
@@ -191,9 +154,6 @@ public class TestFileResource extends BaseJerseyTest {
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.get();
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(fileBytes, null));

// Deletes a file
json = target().path("/file/" + file1Id).request()
@@ -294,44 +254,82 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(1, files.getJsonObject(0).getInt("version"));
}

@Test
public void testFileResourceZip() throws Exception {
// Login file_resources
clientUtil.createUser("file_resources_zip");
String file1Token = clientUtil.login("file_resources_zip");

// Create a document
String document1Id = clientUtil.createDocument(file1Token);

// Add a file
String file1Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, file1Token, document1Id);

// Get a ZIP from all files of the document
Response response = target().path("/file/zip")
.queryParam("id", document1Id)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.get();
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
InputStream is = (InputStream) response.getEntity();
ZipInputStream zipInputStream = new ZipInputStream(is);
Assert.assertEquals(zipInputStream.getNextEntry().getName(), "0-PIA00452.jpg");
Assert.assertNull(zipInputStream.getNextEntry());

// Fail if we don't have access to the document
response = target().path("/file/zip")
.queryParam("id", document1Id)
.request()
.get();
Assert.assertEquals(Status.NOT_FOUND, Status.fromStatusCode(response.getStatus()));

// Create a document
String document2Id = clientUtil.createDocument(file1Token);

// Add a file
String file2Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, file1Token, document2Id);

// Get a ZIP from both files
response = target().path("/file/zip")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file1Token)
.post(Entity.form(new Form()
.param("files", file1Id)
.param("files", file2Id)));
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
is = (InputStream) response.getEntity();
zipInputStream = new ZipInputStream(is);
Assert.assertNotNull(zipInputStream.getNextEntry().getName());
Assert.assertNotNull(zipInputStream.getNextEntry().getName());
Assert.assertNull(zipInputStream.getNextEntry());

// Fail if we don't have access to the files
response = target().path("/file/zip")
.request()
.post(Entity.form(new Form()
.param("files", file1Id)
.param("files", file2Id)));
Assert.assertEquals(Status.FORBIDDEN, Status.fromStatusCode(response.getStatus()));
}

/**
* Test using a ZIP file.
*
* @throws Exception e
*/
@Test
public void testZipFile() throws Exception {
// Login file1
clientUtil.createUser("file2");
String file2Token = clientUtil.login("file2");
public void testZipFileUpload() throws Exception {
// Login file_zip
clientUtil.createUser("file_zip");
String fileZipToken = clientUtil.login("file_zip");

// Create a document
long create1Date = new Date().getTime();
JsonObject json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file2Token)
.put(Entity.form(new Form()
.param("title", "File test document 1")
.param("language", "eng")
.param("create_date", Long.toString(create1Date))), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
String document1Id = clientUtil.createDocument(fileZipToken);

// Add a file
String file1Id;
try (InputStream is = Resources.getResource("file/wikipedia.zip").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "wikipedia.zip");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file2Token)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
Assert.assertEquals(525069L, json.getJsonNumber("size").longValue());
}
}
clientUtil.addFileToDocument(FILE_WIKIPEDIA_ZIP, fileZipToken, document1Id);
}

/**
@@ -341,29 +339,16 @@ public class TestFileResource extends BaseJerseyTest {
*/
@Test
public void testOrphanFile() throws Exception {
// Login file3
clientUtil.createUser("file3");
String file3Token = clientUtil.login("file3");
// Login file_orphan
clientUtil.createUser("file_orphan");
String fileOrphanToken = clientUtil.login("file_orphan");

// Add a file
String file1Id;
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
}
}
String file1Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileOrphanToken, null);

// Get all orphan files
JsonObject json = target().path("/file/list").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get(JsonObject.class);
JsonArray files = json.getJsonArray("files");
Assert.assertEquals(1, files.size());
@@ -372,66 +357,45 @@ public class TestFileResource extends BaseJerseyTest {
Response response = target().path("/file/" + file1Id + "/data")
.queryParam("size", "thumb")
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get();
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
InputStream is = (InputStream) response.getEntity();
byte[] fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertTrue(fileBytes.length > 0);

// Get the file data
response = target().path("/file/" + file1Id + "/data").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get();
is = (InputStream) response.getEntity();
fileBytes = ByteStreams.toByteArray(is);
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(fileBytes, null));
Assert.assertEquals(163510, fileBytes.length);

// Create a document
json = target().path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.put(Entity.form(new Form()
.param("title", "File test document 1")
.param("language", "eng")), JsonObject.class);
String document1Id = json.getString("id");
Assert.assertNotNull(document1Id);
// Create another document
String document2Id = clientUtil.createDocument(fileOrphanToken);

// Attach a file to a document
target().path("/file/" + file1Id + "/attach").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.post(Entity.form(new Form()
.param("id", document1Id)), JsonObject.class);
.param("id", document2Id)), JsonObject.class);

// Get all files from a document
json = target().path("/file/list")
.queryParam("id", document1Id)
.queryParam("id", document2Id)
.request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.get(JsonObject.class);
files = json.getJsonArray("files");
Assert.assertEquals(1, files.size());

// Add a file
String file2Id;
try (InputStream is0 = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is0, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file2Id = json.getString("id");
Assert.assertNotNull(file2Id);
}
}
String file2Id = clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileOrphanToken, null);

// Deletes a file
json = target().path("/file/" + file2Id).request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, file3Token)
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileOrphanToken)
.delete(JsonObject.class);
Assert.assertEquals("ok", json.getString("status"));
}
@@ -448,20 +412,7 @@ public class TestFileResource extends BaseJerseyTest {
String fileQuotaToken = clientUtil.login("file_quota");

// Add a file (292641 bytes large)
String file1Id;
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
file1Id = json.getString("id");
Assert.assertNotNull(file1Id);
}
}
String file1Id = clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);

// Check current quota
JsonObject json = target().path("/user").request()
@@ -470,17 +421,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(292641L, json.getJsonNumber("storage_current").longValue());

// Add a file (292641 bytes large)
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
}
}
clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);

// Check current quota
json = target().path("/user").request()
@@ -489,17 +430,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(585282L, json.getJsonNumber("storage_current").longValue());

// Add a file (292641 bytes large)
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
}
}
clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);

// Check current quota
json = target().path("/user").request()
@@ -508,17 +439,10 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertEquals(877923L, json.getJsonNumber("storage_current").longValue());

// Add a file (292641 bytes large)
try (InputStream is = Resources.getResource("file/Einstein-Roosevelt-letter.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "Einstein-Roosevelt-letter.png");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
Response response = target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE));
Assert.assertEquals(Status.BAD_REQUEST.getStatusCode(), response.getStatus());
}
try {
clientUtil.addFileToDocument(FILE_EINSTEIN_ROOSEVELT_LETTER_PNG, fileQuotaToken, null);
Assert.fail();
} catch (javax.ws.rs.BadRequestException ignored) {
}

// Deletes a file
@@ -545,17 +469,7 @@ public class TestFileResource extends BaseJerseyTest {
Assert.assertNotNull(document1Id);

// Add a file to this document (163510 bytes large)
try (InputStream is = Resources.getResource("file/PIA00452.jpg").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "PIA00452.jpg");
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
target()
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, fileQuotaToken)
.put(Entity.entity(multiPart.field("id", document1Id).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
}
}
clientUtil.addFileToDocument(FILE_PIA_00452_JPG, fileQuotaToken, document1Id);

// Check current quota
json = target().path("/user").request()

@@ -439,13 +439,11 @@ public class TestUserResource extends BaseJerseyTest {
// Create absent_minded who lost his password
clientUtil.createUser("absent_minded");

// User no_such_user try to recovery its password: invalid user
Response response = target().path("/user/password_lost").request()
// User no_such_user try to recovery its password: silently do nothing to avoid leaking users
JsonObject json = target().path("/user/password_lost").request()
.post(Entity.form(new Form()
.param("username", "no_such_user")));
Assert.assertEquals(Response.Status.BAD_REQUEST, Response.Status.fromStatusCode(response.getStatus()));
JsonObject json = response.readEntity(JsonObject.class);
Assert.assertEquals("UserNotFound", json.getString("type"));
.param("username", "no_such_user")), JsonObject.class);
Assert.assertEquals("ok", json.getString("status"));

// User absent_minded try to recovery its password: OK
json = target().path("/user/password_lost").request()
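
The hunk above changes what the test expects from POST /user/password_lost when the username does not exist: instead of a 400 response with a "UserNotFound" error, the endpoint now answers with a plain "ok" status so the API does not reveal which usernames exist. The server-side change is not part of this excerpt; the snippet below is only a sketch of the behaviour the updated test encodes, and the DAO lookup and mail helper names are hypothetical.

// Illustrative only: not the actual UserResource implementation from this repository.
@POST
@Path("password_lost")
@Produces(MediaType.APPLICATION_JSON)
public Response passwordLost(@FormParam("username") String username) {
    User user = userDao.getActiveByUsername(username); // hypothetical lookup
    if (user != null) {
        sendPasswordResetEmail(user); // hypothetical mail helper
    }
    // Always report success so the response does not leak whether the username exists.
    return Response.ok(Json.createObjectBuilder()
            .add("status", "ok")
            .build())
            .build();
}
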
@@ -461,7 +459,7 @@ public class TestUserResource extends BaseJerseyTest {
String key = keyMatcher.group(1).replaceAll("=", "");

// User absent_minded resets its password: invalid key
response = target().path("/user/password_reset").request()
Response response = target().path("/user/password_reset").request()
.post(Entity.form(new Form()
.param("key", "no_such_key")
.param("password", "87654321")));

docs/.gitkeep (new file, 0 lines changed)

pom.xml (10 lines changed)
@@ -6,7 +6,7 @@
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<packaging>pom</packaging>
<version>1.10</version>
<version>1.11</version>

<name>Docs Parent</name>

@@ -52,16 +52,16 @@
<com.squareup.okhttp3.okhttp.version>4.9.0</com.squareup.okhttp3.okhttp.version>
<org.apache.directory.api.api-all.version>2.0.1</org.apache.directory.api.api-all.version>

<org.eclipse.jetty.jetty-server.version>9.4.36.v20210114</org.eclipse.jetty.jetty-server.version>
<org.eclipse.jetty.jetty-webapp.version>9.4.36.v20210114</org.eclipse.jetty.jetty-webapp.version>
<org.eclipse.jetty.jetty-servlet.version>9.4.36.v20210114</org.eclipse.jetty.jetty-servlet.version>
<org.eclipse.jetty.jetty-server.version>9.4.51.v20230217</org.eclipse.jetty.jetty-server.version>
<org.eclipse.jetty.jetty-webapp.version>9.4.51.v20230217</org.eclipse.jetty.jetty-webapp.version>
<org.eclipse.jetty.jetty-servlet.version>9.4.51.v20230217</org.eclipse.jetty.jetty-servlet.version>

<!-- Plugins version -->
<org.apache.maven.plugins.maven-antrun-plugin.version>3.0.0</org.apache.maven.plugins.maven-antrun-plugin.version>
<org.apache.maven.plugins.maven-jar-plugin.version>3.2.0</org.apache.maven.plugins.maven-jar-plugin.version>
<org.apache.maven.plugins.maven-war-plugin.version>3.3.1</org.apache.maven.plugins.maven-war-plugin.version>
<org.apache.maven.plugins.maven-surefire-plugin.version>3.0.0-M5</org.apache.maven.plugins.maven-surefire-plugin.version>
<org.eclipse.jetty.jetty-maven-plugin.version>9.4.36.v20210114</org.eclipse.jetty.jetty-maven-plugin.version>
<org.eclipse.jetty.jetty-maven-plugin.version>9.4.51.v20230217</org.eclipse.jetty.jetty-maven-plugin.version>
</properties>

<scm>