mirror of
https://github.com/sismics/docs.git
synced 2025-12-14 10:16:21 +00:00
Compare commits
194 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
59597e962d | ||
|
|
c85a951a9e | ||
|
|
7f47a17633 | ||
|
|
690c961a55 | ||
|
|
21efd1e4a7 | ||
|
|
ad27228429 | ||
|
|
dd4a1667ca | ||
|
|
399d2b7951 | ||
|
|
d51dfd6636 | ||
|
|
ca85c1fa9f | ||
|
|
5e7f06070e | ||
|
|
dc0c20cd0c | ||
|
|
98aa33341a | ||
|
|
1f7c0afc1e | ||
|
|
1ccce3f942 | ||
|
|
90d5bc8de7 | ||
|
|
c6a685d7c0 | ||
|
|
e6cfd899e5 | ||
|
|
bd23f14792 | ||
|
|
46f6b9e537 | ||
|
|
d5832c48e1 | ||
|
|
64ec0f63ca | ||
|
|
0b7c42e814 | ||
|
|
d8dc63fc98 | ||
|
|
81a7f154c2 | ||
|
|
af3263d471 | ||
|
|
bbe5f19997 | ||
|
|
f33650c099 | ||
|
|
58f81ec851 | ||
|
|
c9262eb204 | ||
|
|
3637b832e5 | ||
|
|
ee56cfe2b4 | ||
|
|
721410c7d0 | ||
|
|
f0310e3933 | ||
|
|
302d7cccc4 | ||
|
|
f9977d5ce6 | ||
|
|
0a927fd320 | ||
|
|
523501a592 | ||
|
|
ff8155be6a | ||
|
|
6c5d697051 | ||
|
|
b19145160e | ||
|
|
c7ada71ef5 | ||
|
|
4951229576 | ||
|
|
d98c1bddec | ||
|
|
b0d0e93364 | ||
|
|
f20a562439 | ||
|
|
4ae8475f5e | ||
|
|
fd4c627c61 | ||
|
|
a867d48232 | ||
|
|
f6bf61fce9 | ||
|
|
c60c9a8f74 | ||
|
|
dc021ab71e | ||
|
|
18b5551f6c | ||
|
|
6fcd8771a5 | ||
|
|
1fef4c3d2e | ||
|
|
ee6ed2bf0b | ||
|
|
57b67fee09 | ||
|
|
a6cbacae72 | ||
|
|
1e0f8e2484 | ||
|
|
bcb4c6d7b0 | ||
|
|
ea1d5907c1 | ||
|
|
05bac38fc3 | ||
|
|
69746cd369 | ||
|
|
ff3db531e5 | ||
|
|
558de7ba3f | ||
|
|
af15116bf9 | ||
|
|
36e5a9747b | ||
|
|
1d66b47f5f | ||
|
|
1346dd3616 | ||
|
|
b6ec5e108b | ||
|
|
5b2833350c | ||
|
|
66acb380ab | ||
|
|
00c62f2ad4 | ||
|
|
7205863d95 | ||
|
|
2a4274d583 | ||
|
|
087184b598 | ||
|
|
e5600e0be7 | ||
|
|
964f3128d2 | ||
|
|
69905cdc55 | ||
|
|
bf4e277db7 | ||
|
|
eaa7cca278 | ||
|
|
0e115bb808 | ||
|
|
1897f5567b | ||
|
|
d647528b3c | ||
|
|
07d42cdb9c | ||
|
|
dabb960c94 | ||
|
|
c71e794051 | ||
|
|
1584c0cbb2 | ||
|
|
22f0f1abf4 | ||
|
|
205f92d093 | ||
|
|
7488ac15a7 | ||
|
|
44f5db993a | ||
|
|
f76eae23ca | ||
|
|
5e2a18f819 | ||
|
|
2f6e5d53c2 | ||
|
|
50e6c4d965 | ||
|
|
3ad0554a7d | ||
|
|
113ec78c67 | ||
|
|
f814927eca | ||
|
|
a9719feeec | ||
|
|
6dc4f1b448 | ||
|
|
e1fa17691d | ||
|
|
42e61d6e1f | ||
|
|
2bf3e6bd3c | ||
|
|
608b2f868d | ||
|
|
46638bab5b | ||
|
|
4607362e46 | ||
|
|
041b2dfcc1 | ||
|
|
7ad0dd43e2 | ||
|
|
35339f7328 | ||
|
|
e474e7cd75 | ||
|
|
612fab2aef | ||
|
|
3f67bd471b | ||
|
|
cb29dcd6cc | ||
|
|
d428e89c30 | ||
|
|
9b2aeb7480 | ||
|
|
d9ad69c7ff | ||
|
|
16fc058264 | ||
|
|
520b143165 | ||
|
|
95c37a03f8 | ||
|
|
0d058b9c9c | ||
|
|
7c72b5e69b | ||
|
|
3ec254e908 | ||
|
|
fda13c004e | ||
|
|
3af85eeea6 | ||
|
|
c08616e6df | ||
|
|
7faa0f8a54 | ||
|
|
26c5fe2e69 | ||
|
|
6bdaa8352b | ||
|
|
6367a1fd15 | ||
|
|
2c5ff64d42 | ||
|
|
e614cb41d8 | ||
|
|
82737e2280 | ||
|
|
3b5c27096b | ||
|
|
8a85830bd3 | ||
|
|
19ac90688e | ||
|
|
5f4a6bc462 | ||
|
|
4c7f3166d4 | ||
|
|
4233f4dd88 | ||
|
|
bd09312418 | ||
|
|
11ab07b238 | ||
|
|
d2e2f089fb | ||
|
|
d619f98de7 | ||
|
|
89228a52dc | ||
|
|
90a49efa4a | ||
|
|
a7423caeb1 | ||
|
|
6f31a2c228 | ||
|
|
fc98b0882f | ||
|
|
dff05967ea | ||
|
|
ec836a2f9d | ||
|
|
737c85cf00 | ||
|
|
ff7b07f464 | ||
|
|
19422b5afa | ||
|
|
6b93e413b6 | ||
|
|
ab72736bcc | ||
|
|
38939e5d05 | ||
|
|
1a90a0e0ad | ||
|
|
7aa9fa4646 | ||
|
|
82d788c8d3 | ||
|
|
ab8176efcb | ||
|
|
b4c3e7a928 | ||
|
|
2db263fb68 | ||
|
|
5fd4d37972 | ||
|
|
9b1dbf351a | ||
|
|
4c7c058e0d | ||
|
|
f8dc08b02b | ||
|
|
0e6bc3ce54 | ||
|
|
fcb018406d | ||
|
|
40756a5e4b | ||
|
|
61b12bdebd | ||
|
|
8b1c41ae1e | ||
|
|
d654564f6b | ||
|
|
8bd22ebafa | ||
|
|
647e66d57b | ||
|
|
67c8ac1aa3 | ||
|
|
f336c7ae53 | ||
|
|
9ea1dad62d | ||
|
|
58bc374e64 | ||
|
|
cea0d4887d | ||
|
|
d5e73ecd8b | ||
|
|
2235a0498b | ||
|
|
3f9b92831c | ||
|
|
5680750c82 | ||
|
|
298e3efe49 | ||
|
|
7b2bd6f9eb | ||
|
|
d935e07990 | ||
|
|
868a74c184 | ||
|
|
a86af9736b | ||
|
|
8bd4d27d2f | ||
|
|
94951c59f3 | ||
|
|
e39c83a5a6 | ||
|
|
94252de73f | ||
|
|
d43072663e | ||
|
|
eb3562567d |
3
.github/FUNDING.yml
vendored
Normal file
3
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: [jendib]
|
||||
84
.github/workflows/build-deploy.yml
vendored
Normal file
84
.github/workflows/build-deploy.yml
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
name: Maven CI/CD
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [master]
|
||||
tags: [v*]
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
build_and_publish:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up JDK 11
|
||||
uses: actions/setup-java@v2
|
||||
with:
|
||||
java-version: "11"
|
||||
distribution: "temurin"
|
||||
cache: maven
|
||||
- name: Install test dependencies
|
||||
run: sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
|
||||
- name: Build with Maven
|
||||
run: mvn -Pprod clean install
|
||||
- name: Upload war artifact
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: docs-web-ci.war
|
||||
path: docs-web/target/docs*.war
|
||||
|
||||
build_docker_image:
|
||||
name: Publish to Docker Hub
|
||||
runs-on: ubuntu-latest
|
||||
needs: [build_and_publish]
|
||||
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
-
|
||||
name: Download war artifact
|
||||
uses: actions/download-artifact@v2
|
||||
with:
|
||||
name: docs-web-ci.war
|
||||
path: docs-web/target
|
||||
-
|
||||
name: Setup up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v1
|
||||
-
|
||||
name: Login to DockerHub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v1
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
-
|
||||
name: Populate Docker metadata
|
||||
id: metadata
|
||||
uses: docker/metadata-action@v3
|
||||
with:
|
||||
images: sismics/docs
|
||||
flavor: |
|
||||
latest=false
|
||||
tags: |
|
||||
type=ref,event=tag
|
||||
type=raw,value=latest,enable=${{ github.ref_type != 'tag' }}
|
||||
labels: |
|
||||
org.opencontainers.image.title = Teedy
|
||||
org.opencontainers.image.description = Teedy is an open source, lightweight document management system for individuals and businesses.
|
||||
org.opencontainers.image.created = ${{ github.event_created_at }}
|
||||
org.opencontainers.image.author = Sismics
|
||||
org.opencontainers.image.url = https://teedy.io/
|
||||
org.opencontainers.image.vendor = Sismics
|
||||
org.opencontainers.image.license = GPLv2
|
||||
org.opencontainers.image.version = ${{ github.event_head_commit.id }}
|
||||
-
|
||||
name: Build and push
|
||||
id: docker_build
|
||||
uses: docker/build-push-action@v2
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
tags: ${{ steps.metadata.outputs.tags }}
|
||||
labels: ${{ steps.metadata.outputs.labels }}
|
||||
11
.gitignore
vendored
11
.gitignore
vendored
@@ -11,6 +11,11 @@
|
||||
*.iml
|
||||
node_modules
|
||||
import_test
|
||||
docs-importer-linux
|
||||
docs-importer-macos
|
||||
docs-importer-win.exe
|
||||
teedy-importer-linux
|
||||
teedy-importer-macos
|
||||
teedy-importer-win.exe
|
||||
docs/*
|
||||
!docs/.gitkeep
|
||||
|
||||
#macos
|
||||
.DS_Store
|
||||
|
||||
26
.travis.yml
26
.travis.yml
@@ -1,26 +0,0 @@
|
||||
sudo: required
|
||||
dist: trusty
|
||||
language: java
|
||||
before_install:
|
||||
- sudo add-apt-repository -y ppa:mc3man/trusty-media
|
||||
- sudo apt-get -qq update
|
||||
- sudo apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld
|
||||
- sudo apt-get -y -q install haveged && sudo service haveged start
|
||||
after_success:
|
||||
- |
|
||||
if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
|
||||
mvn -Pprod -DskipTests clean install
|
||||
docker login -u $DOCKER_USER -p $DOCKER_PASS
|
||||
export REPO=sismics/docs
|
||||
export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
|
||||
docker build -f Dockerfile -t $REPO:$COMMIT .
|
||||
docker tag $REPO:$COMMIT $REPO:$TAG
|
||||
docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
|
||||
docker push $REPO
|
||||
fi
|
||||
env:
|
||||
global:
|
||||
- secure: LRGpjWORb0qy6VuypZjTAfA8uRHlFUMTwb77cenS9PPRBxuSnctC531asS9Xg3DqC5nsRxBBprgfCKotn5S8nBSD1ceHh84NASyzLSBft3xSMbg7f/2i7MQ+pGVwLncusBU6E/drnMFwZBleo+9M8Tf96axY5zuUp90MUTpSgt0=
|
||||
- secure: bCDDR6+I7PmSkuTYZv1HF/z98ANX/SFEESUCqxVmV5Gs0zFC0vQXaPJQ2xaJNRop1HZBFMZLeMMPleb0iOs985smpvK2F6Rbop9Tu+Vyo0uKqv9tbZ7F8Nfgnv9suHKZlL84FNeUQZJX6vsFIYPEJ/r7K5P/M0PdUy++fEwxEhU=
|
||||
- secure: ewXnzbkgCIHpDWtaWGMa1OYZJ/ki99zcIl4jcDPIC0eB3njX/WgfcC6i0Ke9mLqDqwXarWJ6helm22sNh+xtQiz6isfBtBX+novfRt9AANrBe3koCMUemMDy7oh5VflBaFNP0DVb8LSCnwf6dx6ZB5E9EB8knvk40quc/cXpGjY=
|
||||
- COMMIT=${TRAVIS_COMMIT::8}
|
||||
38
Dockerfile
38
Dockerfile
@@ -1,7 +1,37 @@
|
||||
FROM sismics/ubuntu-jetty:9.4.12
|
||||
MAINTAINER b.gamard@sismics.com
|
||||
FROM sismics/ubuntu-jetty:9.4.51
|
||||
LABEL maintainer="b.gamard@sismics.com"
|
||||
|
||||
RUN apt-get update && apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld && \
|
||||
RUN apt-get update && \
|
||||
apt-get -y -q --no-install-recommends install \
|
||||
ffmpeg \
|
||||
mediainfo \
|
||||
tesseract-ocr \
|
||||
tesseract-ocr-ara \
|
||||
tesseract-ocr-ces \
|
||||
tesseract-ocr-chi-sim \
|
||||
tesseract-ocr-chi-tra \
|
||||
tesseract-ocr-dan \
|
||||
tesseract-ocr-deu \
|
||||
tesseract-ocr-fin \
|
||||
tesseract-ocr-fra \
|
||||
tesseract-ocr-heb \
|
||||
tesseract-ocr-hin \
|
||||
tesseract-ocr-hun \
|
||||
tesseract-ocr-ita \
|
||||
tesseract-ocr-jpn \
|
||||
tesseract-ocr-kor \
|
||||
tesseract-ocr-lav \
|
||||
tesseract-ocr-nld \
|
||||
tesseract-ocr-nor \
|
||||
tesseract-ocr-pol \
|
||||
tesseract-ocr-por \
|
||||
tesseract-ocr-rus \
|
||||
tesseract-ocr-spa \
|
||||
tesseract-ocr-swe \
|
||||
tesseract-ocr-tha \
|
||||
tesseract-ocr-tur \
|
||||
tesseract-ocr-ukr \
|
||||
tesseract-ocr-vie && \
|
||||
apt-get clean && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Remove the embedded javax.mail jar from Jetty
|
||||
@@ -9,3 +39,5 @@ RUN rm -f /opt/jetty/lib/mail/javax.mail.glassfish-*.jar
|
||||
|
||||
ADD docs.xml /opt/jetty/webapps/docs.xml
|
||||
ADD docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
|
||||
|
||||
ENV JAVA_OPTIONS -Xmx1g
|
||||
|
||||
214
README.md
214
README.md
@@ -1,41 +1,38 @@
|
||||
<h3 align="center">
|
||||
<img src="https://www.sismicsdocs.com/img/github-title.png" alt="Sismics Docs" width=500 />
|
||||
<img src="https://teedy.io/img/github-title.png" alt="Teedy" width=500 />
|
||||
</h3>
|
||||
|
||||
[](https://twitter.com/sismicsdocs)
|
||||
[](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
|
||||
[](http://travis-ci.org/sismics/docs)
|
||||
|
||||
Docs is an open source, lightweight document management system for individuals and businesses.
|
||||
|
||||
**Discuss it on [Product Hunt](https://www.producthunt.com/posts/sismics-docs) 🦄**
|
||||
Teedy is an open source, lightweight document management system for individuals and businesses.
|
||||
|
||||
<hr />
|
||||
<h2 align="center">
|
||||
✨ We just launched a Cloud version of Sismics Docs! Head to <a href="https://www.sismicsdocs.com/">sismicsdocs.com</a> for more informations ✨
|
||||
✨ <a href="https://github.com/users/jendib/sponsorship">Sponsor this project if you use and appreciate it!</a> ✨
|
||||
</h2>
|
||||
<hr />
|
||||
|
||||

|
||||

|
||||
|
||||
Demo
|
||||
----
|
||||
# Demo
|
||||
|
||||
A demo is available at [demo.teedy.io](https://demo.teedy.io)
|
||||
|
||||
A demo is available at [demo.sismicsdocs.com](https://demo.sismicsdocs.com)
|
||||
- Guest login is enabled with read access on all documents
|
||||
- "admin" login with "admin" password
|
||||
- "demo" login with "password" password
|
||||
|
||||
Features
|
||||
--------
|
||||
# Features
|
||||
|
||||
- Responsive user interface
|
||||
- Optical character recognition
|
||||
- LDAP authentication 
|
||||
- Support image, PDF, ODT, DOCX, PPTX files
|
||||
- Video file support
|
||||
- Flexible search engine with suggestions and highlighting
|
||||
- Full text search in all supported files
|
||||
- All [Dublin Core](http://dublincore.org/) metadata
|
||||
- Custom user-defined metadata 
|
||||
- Workflow system 
|
||||
- 256-bit AES encryption of stored files
|
||||
- File versioning 
|
||||
@@ -55,86 +52,191 @@ Features
|
||||
- [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
|
||||
- Tested to one million documents
|
||||
|
||||
Install with Docker
|
||||
-------------------
|
||||
# Install with Docker
|
||||
|
||||
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database but PostgreSQL is also supported for more performance.
|
||||
|
||||
From a Docker host, run this command to download and install Sismics Docs. The server will run on <http://[your-docker-host-ip]:8100>.
|
||||
**The default admin password is "admin". Don't forget to change it before going to production.**
|
||||
|
||||
docker run --rm --name sismics_docs_latest -d -e DOCS_BASE_URL='http://[your-docker-host-ip]:8100' -p 8100:8080 -v sismics_docs_latest:/data sismics/docs:latest
|
||||
<img src="http://www.newdesignfile.com/postpic/2011/01/green-info-icon_206509.png" width="16px" height="16px"> **Note:** You will need to change [your-docker-host-ip] with the IP address or FQDN of your docker host e.g.
|
||||
|
||||
FQDN: http://docs.sismics.com
|
||||
IP: http://192.168.100.10
|
||||
- Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
|
||||
- Latest stable version: `sismics/docs:v1.11`
|
||||
|
||||
Manual installation
|
||||
-------------------
|
||||
The data directory is `/data`. Don't forget to mount a volume on it.
|
||||
|
||||
#### Requirements
|
||||
- Java 8 with the [Java Cryptography Extension](http://www.oracle.com/technetwork/java/javase/downloads/jce-7-download-432124.html)
|
||||
- Tesseract 3 or 4 for OCR
|
||||
To build external URL, the server is expecting a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com)
|
||||
|
||||
## Available environment variables
|
||||
|
||||
- General
|
||||
- `DOCS_BASE_URL`: The base url used by the application. Generated url's will be using this as base.
|
||||
- `DOCS_GLOBAL_QUOTA`: Defines the default quota applying to all users.
|
||||
- `DOCS_BCRYPT_WORK`: Defines the work factor which is used for password hashing. The default is `10`. This value may be `4...31` including `4` and `31`. The specified value will be used for all new users and users changing their password. Be aware that setting this factor to high can heavily impact login and user creation performance.
|
||||
|
||||
- Admin
|
||||
- `DOCS_ADMIN_EMAIL_INIT`: Defines the e-mail-address the admin user should have upon initialization.
|
||||
- `DOCS_ADMIN_PASSWORD_INIT`: Defines the password the admin user should have upon initialization. Needs to be a bcrypt hash. **Be aware that `$` within the hash have to be escaped with a second `$`.**
|
||||
|
||||
- Database
|
||||
- `DATABASE_URL`: The jdbc connection string to be used by `hibernate`.
|
||||
- `DATABASE_USER`: The user which should be used for the database connection.
|
||||
- `DATABASE_PASSWORD`: The password to be used for the database connection.
|
||||
|
||||
- Language
|
||||
- `DOCS_DEFAULT_LANGUAGE`: The language which will be used as default. Currently supported values are:
|
||||
- `eng`, `fra`, `ita`, `deu`, `spa`, `por`, `pol`, `rus`, `ukr`, `ara`, `hin`, `chi_sim`, `chi_tra`, `jpn`, `tha`, `kor`, `nld`, `tur`, `heb`, `hun`, `fin`, `swe`, `lav`, `dan`
|
||||
|
||||
- E-Mail
|
||||
- `DOCS_SMTP_HOSTNAME`: Hostname of the SMTP-Server to be used by Teedy.
|
||||
- `DOCS_SMTP_PORT`: The port which should be used.
|
||||
- `DOCS_SMTP_USERNAME`: The username to be used.
|
||||
- `DOCS_SMTP_PASSWORD`: The password to be used.
|
||||
|
||||
## Examples
|
||||
|
||||
In the following examples some passwords are exposed in cleartext. This was done in order to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to securely store your passwords.
|
||||
|
||||
### Using the internal database
|
||||
|
||||
```yaml
|
||||
version: '3'
|
||||
services:
|
||||
# Teedy Application
|
||||
teedy-server:
|
||||
image: sismics/docs:v1.11
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
# Map internal port to host
|
||||
- 8080:8080
|
||||
environment:
|
||||
# Base url to be used
|
||||
DOCS_BASE_URL: "https://docs.example.com"
|
||||
# Set the admin email
|
||||
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
|
||||
# Set the admin password (in this example: "superSecure")
|
||||
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
|
||||
volumes:
|
||||
- ./docs/data:/data
|
||||
```
|
||||
|
||||
### Using PostgreSQL
|
||||
|
||||
```yaml
|
||||
version: '3'
|
||||
services:
|
||||
# Teedy Application
|
||||
teedy-server:
|
||||
image: sismics/docs:v1.11
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
# Map internal port to host
|
||||
- 8080:8080
|
||||
environment:
|
||||
# Base url to be used
|
||||
DOCS_BASE_URL: "https://docs.example.com"
|
||||
# Set the admin email
|
||||
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
|
||||
# Set the admin password (in this example: "superSecure")
|
||||
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
|
||||
# Setup the database connection. "teedy-db" is the hostname
|
||||
# and "teedy" is the name of the database the application
|
||||
# will connect to.
|
||||
DATABASE_URL: "jdbc:postgresql://teedy-db:5432/teedy"
|
||||
DATABASE_USER: "teedy_db_user"
|
||||
DATABASE_PASSWORD: "teedy_db_password"
|
||||
volumes:
|
||||
- ./docs/data:/data
|
||||
networks:
|
||||
- docker-internal
|
||||
- internet
|
||||
depends_on:
|
||||
- teedy-db
|
||||
|
||||
# DB for Teedy
|
||||
teedy-db:
|
||||
image: postgres:13.1-alpine
|
||||
restart: unless-stopped
|
||||
expose:
|
||||
- 5432
|
||||
environment:
|
||||
POSTGRES_USER: "teedy_db_user"
|
||||
POSTGRES_PASSWORD: "teedy_db_password"
|
||||
POSTGRES_DB: "teedy"
|
||||
volumes:
|
||||
- ./docs/db:/var/lib/postgresql/data
|
||||
networks:
|
||||
- docker-internal
|
||||
|
||||
networks:
|
||||
# Network without internet access. The db does not need
|
||||
# access to the host network.
|
||||
docker-internal:
|
||||
driver: bridge
|
||||
internal: true
|
||||
internet:
|
||||
driver: bridge
|
||||
```
|
||||
|
||||
# Manual installation
|
||||
|
||||
## Requirements
|
||||
|
||||
- Java 11
|
||||
- Tesseract 4 for OCR
|
||||
- ffmpeg for video thumbnails
|
||||
- mediainfo for video metadata extraction
|
||||
- A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/)
|
||||
|
||||
#### Download
|
||||
## Download
|
||||
|
||||
The latest release is downloadable here: <https://github.com/sismics/docs/releases> in WAR format.
|
||||
**The default admin password is "admin". Don't forget to change it before going to production.**
|
||||
|
||||
How to build Docs from the sources
|
||||
----------------------------------
|
||||
## How to build Teedy from the sources
|
||||
|
||||
Prerequisites: JDK 8 with JCE, Maven 3, Tesseract 3 or 4
|
||||
Prerequisites: JDK 11, Maven 3, NPM, Grunt, Tesseract 4
|
||||
|
||||
Docs is organized in several Maven modules:
|
||||
Teedy is organized in several Maven modules:
|
||||
|
||||
- docs-core
|
||||
- docs-web
|
||||
- docs-web-common
|
||||
- docs-core
|
||||
- docs-web
|
||||
- docs-web-common
|
||||
|
||||
First off, clone the repository: `git clone git://github.com/sismics/docs.git`
|
||||
or download the sources from GitHub.
|
||||
|
||||
#### Launch the build
|
||||
### Launch the build
|
||||
|
||||
From the root directory:
|
||||
|
||||
mvn clean -DskipTests install
|
||||
```console
|
||||
mvn clean -DskipTests install
|
||||
```
|
||||
|
||||
#### Run a stand-alone version
|
||||
### Run a stand-alone version
|
||||
|
||||
From the `docs-web` directory:
|
||||
|
||||
mvn jetty:run
|
||||
```console
|
||||
mvn jetty:run
|
||||
```
|
||||
|
||||
#### Build a .war to deploy to your servlet container
|
||||
### Build a .war to deploy to your servlet container
|
||||
|
||||
From the `docs-web` directory:
|
||||
|
||||
mvn -Pprod -DskipTests clean install
|
||||
```console
|
||||
mvn -Pprod -DskipTests clean install
|
||||
```
|
||||
|
||||
You will get your deployable WAR in the `docs-web/target` directory.
|
||||
|
||||
Contributing
|
||||
------------
|
||||
# Contributing
|
||||
|
||||
All contributions are more than welcomed. Contributions may close an issue, fix a bug (reported or not reported), improve the existing code, add new feature, and so on.
|
||||
|
||||
The `master` branch is the default and base branch for the project. It is used for development and all Pull Requests should go there.
|
||||
|
||||
# License
|
||||
|
||||
Community
|
||||
---------
|
||||
|
||||
Get updates on Sismics Docs' development and chat with the project maintainers:
|
||||
|
||||
- Follow [@sismicsdocs on Twitter](https://twitter.com/sismicsdocs)
|
||||
- Read and subscribe to [The Official Sismics Docs Blog](https://blog.sismicsdocs.com/)
|
||||
- Check the [Official Website](https://www.sismicsdocs.com)
|
||||
- Join us [on Facebook](https://www.facebook.com/sismicsdocs)
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
Docs is released under the terms of the GPL license. See `COPYING` for more
|
||||
Teedy is released under the terms of the GPL license. See `COPYING` for more
|
||||
information or see <http://opensource.org/licenses/GPL-2.0>.
|
||||
|
||||
18
docker-compose.yml
Normal file
18
docker-compose.yml
Normal file
@@ -0,0 +1,18 @@
|
||||
version: '3'
|
||||
services:
|
||||
# Teedy Application
|
||||
teedy-server:
|
||||
image: sismics/docs:v1.10
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
# Map internal port to host
|
||||
- 8080:8080
|
||||
environment:
|
||||
# Base url to be used
|
||||
DOCS_BASE_URL: "https://docs.example.com"
|
||||
# Set the admin email
|
||||
DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
|
||||
# Set the admin password (in this example: "superSecure")
|
||||
DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
|
||||
volumes:
|
||||
- ./docs/data:/data
|
||||
@@ -4,7 +4,7 @@ buildscript {
|
||||
google()
|
||||
}
|
||||
dependencies {
|
||||
classpath 'com.android.tools.build:gradle:3.3.0'
|
||||
classpath 'com.android.tools.build:gradle:3.4.0'
|
||||
}
|
||||
}
|
||||
apply plugin: 'com.android.application'
|
||||
|
||||
@@ -15,7 +15,7 @@
|
||||
android:allowBackup="true"
|
||||
android:icon="@mipmap/ic_launcher"
|
||||
android:label="@string/app_name"
|
||||
android:theme="@style/AppTheme" >
|
||||
android:theme="@style/AppTheme">
|
||||
<activity
|
||||
android:name=".activity.LoginActivity"
|
||||
android:label="@string/app_name"
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.sismics.docs;
|
||||
|
||||
import android.app.Application;
|
||||
import android.support.v7.app.AppCompatDelegate;
|
||||
|
||||
import com.sismics.docs.model.application.ApplicationContext;
|
||||
import com.sismics.docs.util.PreferenceUtil;
|
||||
@@ -22,5 +23,7 @@ public class MainApplication extends Application {
|
||||
// TODO Provide documents to intent action get content
|
||||
|
||||
super.onCreate();
|
||||
|
||||
AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_NO);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,7 @@ public class LanguageAdapter extends BaseAdapter {
|
||||
languageList.add(new Language("fra", R.string.language_french, R.drawable.fra));
|
||||
languageList.add(new Language("eng", R.string.language_english, R.drawable.eng));
|
||||
languageList.add(new Language("deu", R.string.language_german, R.drawable.deu));
|
||||
languageList.add(new Language("pol", R.string.language_polish, R.drawable.pol));
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -63,14 +63,13 @@ public class DocListFragment extends Fragment {
|
||||
recyclerView.setAdapter(adapter);
|
||||
recyclerView.setHasFixedSize(true);
|
||||
recyclerView.setLongClickable(true);
|
||||
recyclerView.addItemDecoration(new DividerItemDecoration(getResources().getDrawable(R.drawable.abc_list_divider_mtrl_alpha)));
|
||||
|
||||
// Configure the LayoutManager
|
||||
final LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity());
|
||||
recyclerView.setLayoutManager(layoutManager);
|
||||
|
||||
// Configure the swipe refresh layout
|
||||
swipeRefreshLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipeRefreshLayout);
|
||||
swipeRefreshLayout = view.findViewById(R.id.swipeRefreshLayout);
|
||||
swipeRefreshLayout.setColorSchemeResources(android.R.color.holo_blue_bright,
|
||||
android.R.color.holo_green_light,
|
||||
android.R.color.holo_orange_light,
|
||||
@@ -194,7 +193,7 @@ public class DocListFragment extends Fragment {
|
||||
private void loadDocuments(final View view, final boolean reset) {
|
||||
if (view == null) return;
|
||||
final View progressBar = view.findViewById(R.id.progressBar);
|
||||
final TextView documentsEmptyView = (TextView) view.findViewById(R.id.documentsEmptyView);
|
||||
final TextView documentsEmptyView = view.findViewById(R.id.documentsEmptyView);
|
||||
|
||||
if (reset) {
|
||||
loading = true;
|
||||
|
||||
@@ -156,7 +156,7 @@ public class OkHttpUtil {
|
||||
public static OkHttpClient buildClient(final Context context) {
|
||||
// One-time header computation
|
||||
if (userAgent == null) {
|
||||
userAgent = "Sismics Docs Android " + ApplicationUtil.getVersionName(context) + "/Android " + Build.VERSION.RELEASE + "/" + Build.MODEL;
|
||||
userAgent = "Teedy Android " + ApplicationUtil.getVersionName(context) + "/Android " + Build.VERSION.RELEASE + "/" + Build.MODEL;
|
||||
}
|
||||
|
||||
if (acceptLanguage == null) {
|
||||
|
||||
@@ -39,7 +39,9 @@ public class SearchQueryBuilder {
|
||||
*/
|
||||
public SearchQueryBuilder simpleSearch(String simpleSearch) {
|
||||
if (isValid(simpleSearch)) {
|
||||
query.append(SEARCH_SEPARATOR).append(simpleSearch);
|
||||
query.append(SEARCH_SEPARATOR)
|
||||
.append("simple:")
|
||||
.append(simpleSearch);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
BIN
docs-android/app/src/main/res/drawable-xhdpi/pol.png
Normal file
BIN
docs-android/app/src/main/res/drawable-xhdpi/pol.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 238 B |
@@ -29,7 +29,7 @@
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="sans-serif-light"
|
||||
android:textColor="#212121"
|
||||
android:textColor="?android:attr/textColorPrimary"
|
||||
android:text="Test"
|
||||
android:textSize="16sp"
|
||||
android:ellipsize="end"
|
||||
@@ -46,7 +46,7 @@
|
||||
android:layout_width="wrap_content"
|
||||
android:layout_height="wrap_content"
|
||||
android:fontFamily="sans-serif-light"
|
||||
android:textColor="#777777"
|
||||
android:textColor="?android:attr/textColorPrimary"
|
||||
android:text="test2"
|
||||
android:textSize="16sp"
|
||||
android:maxLines="1"
|
||||
@@ -69,7 +69,7 @@
|
||||
android:layout_alignParentEnd="true"
|
||||
android:layout_alignParentRight="true"
|
||||
android:layout_alignParentTop="true"
|
||||
android:textColor="#777777"
|
||||
android:textColor="?android:attr/textColorPrimary"
|
||||
android:fontFamily="sans-serif-light"/>
|
||||
|
||||
</RelativeLayout>
|
||||
@@ -9,23 +9,22 @@
|
||||
<android.support.design.widget.CoordinatorLayout
|
||||
xmlns:app="http://schemas.android.com/apk/res-auto"
|
||||
android:id="@+id/overview_coordinator_layout"
|
||||
android:theme="@style/ThemeOverlay.AppCompat.Dark.ActionBar"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="match_parent">
|
||||
|
||||
<android.support.design.widget.AppBarLayout
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="wrap_content">
|
||||
|
||||
|
||||
<android.support.v7.widget.Toolbar
|
||||
android:id="@+id/toolbar"
|
||||
<android.support.design.widget.AppBarLayout
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="?attr/actionBarSize"
|
||||
app:popupTheme="@style/ThemeOverlay.AppCompat.Light"
|
||||
app:layout_scrollFlags="enterAlways|scroll|snap" />
|
||||
android:layout_height="wrap_content">
|
||||
|
||||
</android.support.design.widget.AppBarLayout>
|
||||
|
||||
<android.support.v7.widget.Toolbar
|
||||
android:id="@+id/toolbar"
|
||||
android:layout_width="match_parent"
|
||||
android:layout_height="?attr/actionBarSize"
|
||||
app:popupTheme="@style/AppTheme"
|
||||
app:layout_scrollFlags="enterAlways|scroll|snap" />
|
||||
|
||||
</android.support.design.widget.AppBarLayout>
|
||||
|
||||
<fragment
|
||||
android:id="@+id/main_fragment"
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 5.7 KiB After Width: | Height: | Size: 6.3 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 11 KiB After Width: | Height: | Size: 9.7 KiB |
@@ -11,7 +11,7 @@
|
||||
<!-- App -->
|
||||
<string name="drawer_open">Navigationsleiste öffnen</string>
|
||||
<string name="drawer_close">Navigationsleiste schließen</string>
|
||||
<string name="login_explain"><![CDATA[Um zu beginnen, müssen Sie Sismics Docs Server herunterladen und installieren <a href="https://github.com/sismics/docs">github.com/sismics/docs</a>, sowie die Login-Daten unten eingeben]]></string>
|
||||
<string name="login_explain"><![CDATA[Um zu beginnen, müssen Sie Teedy Server herunterladen und installieren <a href="https://github.com/sismics/docs">github.com/sismics/docs</a>, sowie die Login-Daten unten eingeben]]></string>
|
||||
<string name="server">Server</string>
|
||||
<string name="username">Username</string>
|
||||
<string name="password">Password</string>
|
||||
@@ -83,7 +83,7 @@
|
||||
<string name="file_delete_failure">Netzwerkfehler beim Löschen der Datei</string>
|
||||
<string name="file_deleting_message">Lösche Datei</string>
|
||||
<string name="error_reading_file">Fehler beim Lesen der Datei</string>
|
||||
<string name="upload_notification_title">Sismics Docs</string>
|
||||
<string name="upload_notification_title">Teedy</string>
|
||||
<string name="upload_notification_message">Neue Datei in das Dokument hochladen</string>
|
||||
<string name="upload_notification_error">Fehler beim Hochladen der neuen Datei</string>
|
||||
<string name="delete_file">Aktuelle Datei löschen</string>
|
||||
@@ -119,9 +119,9 @@
|
||||
<string name="export_comments">Kommentare exportieren</string>
|
||||
<string name="export_metadata">Metadaten exportieren</string>
|
||||
<string name="mm">mm</string>
|
||||
<string name="download_file_title">Sismics Docs Datei Export</string>
|
||||
<string name="download_document_title">Sismics Docs Dokumentenexport</string>
|
||||
<string name="download_pdf_title">Sismics Docs PDF Export</string>
|
||||
<string name="download_file_title">Teedy Datei Export</string>
|
||||
<string name="download_document_title">Teedy Dokumentenexport</string>
|
||||
<string name="download_pdf_title">Teedy PDF Export</string>
|
||||
<string name="latest_activity">Letzte Aktivität</string>
|
||||
<string name="activity">Aktivitäten</string>
|
||||
<string name="email">E-Mail</string>
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
<!-- App -->
|
||||
<string name="drawer_open">Ouvrir le menu de navigation</string>
|
||||
<string name="drawer_close">Fermer le menu de navigation</string>
|
||||
<string name="login_explain"><![CDATA[Pour commencer, vous devez télécharger et installer le serveur Sismics Docs sur <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> et entrer son URL ci-dessous]]></string>
|
||||
<string name="login_explain"><![CDATA[Pour commencer, vous devez télécharger et installer le serveur Teedy sur <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> et entrer son URL ci-dessous]]></string>
|
||||
<string name="server">Serveur</string>
|
||||
<string name="username">Nom d\'utilisateur</string>
|
||||
<string name="password">Mot de passe</string>
|
||||
@@ -83,7 +83,7 @@
|
||||
<string name="file_delete_failure">Erreur réseau lors de la suppression du fichier</string>
|
||||
<string name="file_deleting_message">Suppression du fichier</string>
|
||||
<string name="error_reading_file">Erreur lors de la lecture du fichier</string>
|
||||
<string name="upload_notification_title">Sismics Docs</string>
|
||||
<string name="upload_notification_title">Teedy</string>
|
||||
<string name="upload_notification_message">Envoi du nouveau fichier</string>
|
||||
<string name="upload_notification_error">Erreur lors de l\'envoi du nouveau fichier</string>
|
||||
<string name="delete_file">Supprimer ce fichier</string>
|
||||
@@ -119,9 +119,9 @@
|
||||
<string name="export_comments">Exporter les commentaires</string>
|
||||
<string name="export_metadata">Exporter les métadonnées</string>
|
||||
<string name="mm">mm</string>
|
||||
<string name="download_file_title">Export de fichier Sismics Docs</string>
|
||||
<string name="download_document_title">Export de document Sismics Docs</string>
|
||||
<string name="download_pdf_title">Export PDF Sismics Docs</string>
|
||||
<string name="download_file_title">Export de fichier Teedy</string>
|
||||
<string name="download_document_title">Export de document Teedy</string>
|
||||
<string name="download_pdf_title">Export PDF Teedy</string>
|
||||
<string name="latest_activity">Activité récente</string>
|
||||
<string name="activity">Activité</string>
|
||||
<string name="email">E-mail</string>
|
||||
|
||||
164
docs-android/app/src/main/res/values-pl/strings.xml
Normal file
164
docs-android/app/src/main/res/values-pl/strings.xml
Normal file
@@ -0,0 +1,164 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<resources>
|
||||
|
||||
<!-- Validation -->
|
||||
<string name="validate_error_email">Nieprawidłowy email</string>
|
||||
<string name="validate_error_length_min">Za krótki (min. %d)</string>
|
||||
<string name="validate_error_length_max">Za długi (max. %d)</string>
|
||||
<string name="validate_error_required">Wymagany</string>
|
||||
<string name="validate_error_alphanumeric">Tylko litery i cyfry</string>
|
||||
|
||||
<!-- App -->
|
||||
<string name="app_name" translatable="false">Teedy</string>
|
||||
<string name="drawer_open">Otwórz szufladę nawigacji</string>
|
||||
<string name="drawer_close">Zamknij szufladę nawigacji</string>
|
||||
<string name="login_explain"><![CDATA[Aby rozpocząć, musisz pobrać i zainstalować serwer Teedy na <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> i poniżej wprowadzić adres]]></string>
|
||||
<string name="server">Serwer</string>
|
||||
<string name="username">Użytkownik</string>
|
||||
<string name="password">Hasło</string>
|
||||
<string name="login">Zaloguj</string>
|
||||
<string name="ok">OK</string>
|
||||
<string name="cancel">Anuluj</string>
|
||||
<string name="login_fail_title">Błąd logowania</string>
|
||||
<string name="login_fail">Nieprawidłowa nazwa użytkownika lub hasło</string>
|
||||
<string name="network_error_title">Błąd sieci</string>
|
||||
<string name="network_error">Błąd sieci, sprawdź połączenie z interneterm oraz adres URL serwera</string>
|
||||
<string name="invalid_url_title">Nieprawidłowy adres URL</string>
|
||||
<string name="invalid_url">Sprawdź adres URL serwera i spróbuj ponownie</string>
|
||||
<string name="crash_toast_text">Wystąpiła awaria, wysłano raport w celu rozwiązania tego problemu</string>
|
||||
<string name="created_date">Data utworzenia</string>
|
||||
<string name="download_file">Pobierz bieżący plik</string>
|
||||
<string name="download_document">Pobierz</string>
|
||||
<string name="action_search">Znadź dokumenty</string>
|
||||
<string name="all_documents">Wszystkie dokumenty</string>
|
||||
<string name="shared_documents">Udostępnione dokumenty</string>
|
||||
<string name="all_tags">Wszystkie etykiety</string>
|
||||
<string name="no_tags">Brak etykiet</string>
|
||||
<string name="error_loading_tags">Błąd ładowania etykiet</string>
|
||||
<string name="no_documents">Brak dokumentów</string>
|
||||
<string name="error_loading_documents">Błąd ładowania dokumentów</string>
|
||||
<string name="no_files">Brak plików</string>
|
||||
<string name="error_loading_files">Błąd ładowania plików</string>
|
||||
<string name="new_document">Nowy dokument</string>
|
||||
<string name="share">Udostępnij</string>
|
||||
<string name="close">Zamknij</string>
|
||||
<string name="add">Dodaj</string>
|
||||
<string name="add_share_hint">Nazwa udostępnienia (opcjonalnie)</string>
|
||||
<string name="document_not_shared">Ten dokument nie jest obecnie udostępniony</string>
|
||||
<string name="delete_share">Usuń udostępnienie</string>
|
||||
<string name="send_share">Wyślij link udostępnienia</string>
|
||||
<string name="error_loading_shares">Błąd ładowania udostępnień</string>
|
||||
<string name="error_adding_share">Błąd dodawania udostępnienia</string>
|
||||
<string name="share_default_name">Udostępnij link</string>
|
||||
<string name="error_deleting_share">Błąd usuwania udostępnienia</string>
|
||||
<string name="send_share_to">Wyślij link udostępnienia do</string>
|
||||
<string name="upload_file">dodaj plik</string>
|
||||
<string name="upload_from">Przeslij plik z</string>
|
||||
<string name="settings">ustawienia</string>
|
||||
<string name="logout">Wyloguj</string>
|
||||
<string name="version">Wersja</string>
|
||||
<string name="build">Kompilacja</string>
|
||||
<string name="pref_advanced_category">Ustawienia zaawansowane</string>
|
||||
<string name="pref_about_category">O programie</string>
|
||||
<string name="pref_github">GitHub</string>
|
||||
<string name="pref_issue">Zgłoś błąd</string>
|
||||
<string name="pref_clear_cache_title">Wyczyść cache</string>
|
||||
<string name="pref_clear_cache_summary">Wyczyść podręczne pliki</string>
|
||||
<string name="pref_clear_cache_success">Cache wyczyszczony</string>
|
||||
<string name="pref_clear_history_title">Wyczyść historię wyszukiwania</string>
|
||||
<string name="pref_clear_history_summary">Opróżnij ostatnie sugestie wyszukiwania</string>
|
||||
<string name="pref_clear_history_success">Historia wyszukiwania wyczyszczona</string>
|
||||
<string name="pref_cache_size">Rozmiar cache</string>
|
||||
<string name="language_french" translatable="false">Francuski</string>
|
||||
<string name="language_english" translatable="false">Angielski</string>
|
||||
<string name="language_german" translatable="false">Niemiecki</string>
|
||||
<string name="language_polish" translatable="false">Polski</string>
|
||||
<string name="save">Zapisz</string>
|
||||
<string name="edit_document">Edytuj</string>
|
||||
<string name="error_editing_document">Błąd sieci, spróbuj ponownie</string>
|
||||
<string name="please_wait">Proszę czekać</string>
|
||||
<string name="document_editing_message">Wysyłam twoje dane</string>
|
||||
<string name="delete_document">Usuń</string>
|
||||
<string name="delete_document_title">Usuń dokument</string>
|
||||
<string name="delete_document_message">Naprawdę chcesz usunąć dokument i powiązane z nim pliki?</string>
|
||||
<string name="document_delete_failure">Błąd sieci w czasie usuwania tego dokumentu</string>
|
||||
<string name="document_deleting_message">Usuwanie dokumentu</string>
|
||||
<string name="delete_file_title">Usuń plik</string>
|
||||
<string name="delete_file_message">Naprawdę chcesz usunąć ten plik?</string>
|
||||
<string name="file_delete_failure">Błąd sieci w czasie usuwania bieżącego pliku</string>
|
||||
<string name="file_deleting_message">Usuwanie pliku</string>
|
||||
<string name="error_reading_file">Błąd podczas odczytu pliku</string>
|
||||
<string name="upload_notification_title">Teedy</string>
|
||||
<string name="upload_notification_message">Przesyłanie nowego pliku do dokumentu</string>
|
||||
<string name="upload_notification_error">Błąd przsyłania nowego pliku</string>
|
||||
<string name="delete_file">Usuń bieżący plik</string>
|
||||
<string name="advanced_search">Zaawansowane wyszukiwanie</string>
|
||||
<string name="search">Znajdź</string>
|
||||
<string name="add_tags">Dodaj eytkiety</string>
|
||||
<string name="creation_date">Data utworzenia</string>
|
||||
<string name="description">Opis</string>
|
||||
<string name="title">Tytuł</string>
|
||||
<string name="simple_search">Proste wyszukiwanie</string>
|
||||
<string name="fulltext_search">Wyszukiwanie pełnotekstowe</string>
|
||||
<string name="creator">Autor</string>
|
||||
<string name="after_date">Po dacie</string>
|
||||
<string name="before_date">Przed datą</string>
|
||||
<string name="search_tags">Znajdź etykiety</string>
|
||||
<string name="all_languages">Wszystkie języki</string>
|
||||
<string name="toggle_informations">Przełącz informacje</string>
|
||||
<string name="who_can_access">Kto ma dostęp</string>
|
||||
<string name="comments">Komentarze</string>
|
||||
<string name="no_comments">Brak komentarzy</string>
|
||||
<string name="error_loading_comments">Błąd ładowania komentarzy</string>
|
||||
<string name="send">Wyślij</string>
|
||||
<string name="add_comment">Dodaj komentarz</string>
|
||||
<string name="comment_add_failure">Błąd dodawania komentarza</string>
|
||||
<string name="adding_comment">Dodawanie komentarza</string>
|
||||
<string name="comment_delete">Usuń komentarz</string>
|
||||
<string name="deleting_comment">Usuwanie komentarza</string>
|
||||
<string name="error_deleting_comment">Błąd usuwania komentarza</string>
|
||||
<string name="export_pdf">PDF</string>
|
||||
<string name="download">Pobierz</string>
|
||||
<string name="margin">Margines</string>
|
||||
<string name="fit_image_to_page">Dostosuj obraz do strony</string>
|
||||
<string name="export_comments">Eksport komentarzy</string>
|
||||
<string name="export_metadata">Eksport metadanych</string>
|
||||
<string name="mm">mm</string>
|
||||
<string name="download_file_title">Eksport plików Teedy</string>
|
||||
<string name="download_document_title">Eksport dokumentu Teedy</string>
|
||||
<string name="download_pdf_title">Eksport Teedy jako PDF</string>
|
||||
<string name="latest_activity">Ostatnie aktywności</string>
|
||||
<string name="activity">Aktywności</string>
|
||||
<string name="email">E-mail</string>
|
||||
<string name="storage_quota">Limit magazynu</string>
|
||||
<string name="storage_display">%1$d/%2$d MB</string>
|
||||
<string name="validation_code">Kod weryfikujący</string>
|
||||
<string name="shared">Udostępnienie</string>
|
||||
<string name="language">Język</string>
|
||||
<string name="coverage">Zakres</string>
|
||||
<string name="type">Rodzaj</string>
|
||||
<string name="source">Źródło</string>
|
||||
<string name="format">Format</string>
|
||||
<string name="publisher">Udostępniający</string>
|
||||
<string name="identifier">Identifikator</string>
|
||||
<string name="subject">temat</string>
|
||||
<string name="rights">Prawa</string>
|
||||
<string name="contributors">Współtwórcy</string>
|
||||
<string name="relations">Powiązania</string>
|
||||
|
||||
<!-- Audit log -->
|
||||
<string name="auditlog_Acl">ACL</string>
|
||||
<string name="auditlog_Comment">Komentarz</string>
|
||||
<string name="auditlog_Document">Dokument</string>
|
||||
<string name="auditlog_File">Plik</string>
|
||||
<string name="auditlog_Group">Grupa</string>
|
||||
<string name="auditlog_Route">Przepływ</string>
|
||||
<string name="auditlog_RouteModel">Model przepływu</string>
|
||||
<string name="auditlog_Tag">Etykieta</string>
|
||||
<string name="auditlog_User">Użytkownik</string>
|
||||
<string name="auditlog_Webhook">Webhook</string>
|
||||
<string name="auditlog_created">utworzony</string>
|
||||
<string name="auditlog_updated">zaktualizowany</string>
|
||||
<string name="auditlog_deleted">usunięty</string>
|
||||
|
||||
</resources>
|
||||
@@ -9,10 +9,10 @@
|
||||
<string name="validate_error_alphanumeric">Only letters and numbers</string>
|
||||
|
||||
<!-- App -->
|
||||
<string name="app_name" translatable="false">Sismics Docs</string>
|
||||
<string name="app_name" translatable="false">Teedy</string>
|
||||
<string name="drawer_open">Open navigation drawer</string>
|
||||
<string name="drawer_close">Close navigation drawer</string>
|
||||
<string name="login_explain"><![CDATA[To start, you must download and install Sismics Docs Server on <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> and enter its below]]></string>
|
||||
<string name="login_explain"><![CDATA[To start, you must download and install Teedy Server on <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> and enter its below]]></string>
|
||||
<string name="server">Server</string>
|
||||
<string name="username">Username</string>
|
||||
<string name="password">Password</string>
|
||||
@@ -72,6 +72,7 @@
|
||||
<string name="language_french" translatable="false">Français</string>
|
||||
<string name="language_english" translatable="false">English</string>
|
||||
<string name="language_german" translatable="false">Deutsch</string>
|
||||
<string name="language_polish" translatable="false">Polski</string>
|
||||
<string name="save">Save</string>
|
||||
<string name="edit_document">Edit</string>
|
||||
<string name="error_editing_document">Network error, please try again</string>
|
||||
@@ -87,7 +88,7 @@
|
||||
<string name="file_delete_failure">Network error while deleting the current file</string>
|
||||
<string name="file_deleting_message">Deleting file</string>
|
||||
<string name="error_reading_file">Error while reading the file</string>
|
||||
<string name="upload_notification_title">Sismics Docs</string>
|
||||
<string name="upload_notification_title">Teedy</string>
|
||||
<string name="upload_notification_message">Uploading the new file to the document</string>
|
||||
<string name="upload_notification_error">Error uploading the new file</string>
|
||||
<string name="delete_file">Delete current file</string>
|
||||
@@ -123,9 +124,9 @@
|
||||
<string name="export_comments">Export comments</string>
|
||||
<string name="export_metadata">Export metadata</string>
|
||||
<string name="mm">mm</string>
|
||||
<string name="download_file_title">Sismics Docs file export</string>
|
||||
<string name="download_document_title">Sismics Docs document export</string>
|
||||
<string name="download_pdf_title">Sismics Docs PDF export</string>
|
||||
<string name="download_file_title">Teedy file export</string>
|
||||
<string name="download_document_title">Teedy document export</string>
|
||||
<string name="download_pdf_title">Teedy PDF export</string>
|
||||
<string name="latest_activity">Latest activity</string>
|
||||
<string name="activity">Activity</string>
|
||||
<string name="email">E-mail</string>
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
<resources>
|
||||
|
||||
<style name="AppTheme" parent="Theme.AppCompat.Light.DarkActionBar">
|
||||
<style name="AppTheme" parent="Theme.AppCompat.DayNight">
|
||||
<item name="colorPrimary">@color/colorPrimary</item>
|
||||
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
|
||||
<item name="colorAccent">@color/colorAccent</item>
|
||||
</style>
|
||||
|
||||
<style name="AppTheme.NoActionBar" parent="Theme.AppCompat.Light.DarkActionBar">
|
||||
<style name="AppTheme.NoActionBar" parent="Theme.AppCompat.DayNight.NoActionBar">
|
||||
<item name="windowActionBar">false</item>
|
||||
<item name="windowNoTitle">true</item>
|
||||
<item name="colorPrimary">@color/colorPrimary</item>
|
||||
@@ -14,7 +14,7 @@
|
||||
<item name="colorAccent">@color/colorAccent</item>
|
||||
</style>
|
||||
|
||||
<style name="AppThemeDark" parent="Theme.AppCompat.NoActionBar">
|
||||
<style name="AppThemeDark" parent="Theme.AppCompat.DayNight.NoActionBar">
|
||||
<item name="colorPrimary">@color/colorPrimary</item>
|
||||
<item name="colorPrimaryDark">@color/colorPrimaryDark</item>
|
||||
<item name="colorAccent">@color/colorAccent</item>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
#Wed Jan 30 16:31:31 CET 2019
|
||||
#Tue May 07 11:49:13 CEST 2019
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
zipStorePath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-4.10.1-all.zip
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-5.1.1-all.zip
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>com.sismics.docs</groupId>
|
||||
<artifactId>docs-parent</artifactId>
|
||||
<version>1.6-SNAPSHOT</version>
|
||||
<version>1.11</version>
|
||||
<relativePath>..</relativePath>
|
||||
</parent>
|
||||
|
||||
@@ -91,10 +91,10 @@
|
||||
<groupId>org.slf4j</groupId>
|
||||
<artifactId>jcl-over-slf4j</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.mindrot</groupId>
|
||||
<artifactId>jbcrypt</artifactId>
|
||||
<groupId>at.favre.lib</groupId>
|
||||
<artifactId>bcrypt</artifactId>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
@@ -131,7 +131,12 @@
|
||||
<groupId>com.squareup.okhttp3</groupId>
|
||||
<artifactId>okhttp</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.apache.directory.api</groupId>
|
||||
<artifactId>api-all</artifactId>
|
||||
</dependency>
|
||||
|
||||
<!-- Only there to read old index and rebuild them -->
|
||||
<dependency>
|
||||
<groupId>org.apache.lucene</groupId>
|
||||
@@ -189,7 +194,26 @@
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- JDK 11 JAXB dependencies -->
|
||||
<dependency>
|
||||
<groupId>javax.xml.bind</groupId>
|
||||
<artifactId>jaxb-api</artifactId>
|
||||
<version>2.3.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-core</artifactId>
|
||||
<version>2.3.0</version>
|
||||
</dependency>
|
||||
|
||||
<dependency>
|
||||
<groupId>com.sun.xml.bind</groupId>
|
||||
<artifactId>jaxb-impl</artifactId>
|
||||
<version>2.3.0</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
<groupId>junit</groupId>
|
||||
|
||||
@@ -42,5 +42,21 @@ public enum ConfigType {
|
||||
INBOX_PORT,
|
||||
INBOX_USERNAME,
|
||||
INBOX_PASSWORD,
|
||||
INBOX_TAG
|
||||
INBOX_FOLDER,
|
||||
INBOX_TAG,
|
||||
INBOX_AUTOMATIC_TAGS,
|
||||
INBOX_DELETE_IMPORTED,
|
||||
|
||||
/**
|
||||
* LDAP connection.
|
||||
*/
|
||||
LDAP_ENABLED,
|
||||
LDAP_HOST,
|
||||
LDAP_PORT,
|
||||
LDAP_ADMIN_DN,
|
||||
LDAP_ADMIN_PASSWORD,
|
||||
LDAP_BASE_DN,
|
||||
LDAP_FILTER,
|
||||
LDAP_DEFAULT_EMAIL,
|
||||
LDAP_DEFAULT_STORAGE
|
||||
}
|
||||
|
||||
@@ -18,13 +18,18 @@ public class Constants {
|
||||
/**
|
||||
* Administrator's default password ("admin").
|
||||
*/
|
||||
public static final String DEFAULT_ADMIN_PASSWORD = "$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C";
|
||||
public static final String DEFAULT_ADMIN_PASSWORD = "$2y$10$xg0EEKVUehutDI1m6qQhVeFz7SMQMl1jQzjf2KkVsR2c7aV2vyyjK";
|
||||
|
||||
/**
|
||||
* Administrator's default email.
|
||||
*/
|
||||
public static final String DEFAULT_ADMIN_EMAIL = "admin@localhost";
|
||||
|
||||
/**
|
||||
* Bcrypt default work factor
|
||||
*/
|
||||
public static final int DEFAULT_BCRYPT_WORK = 10;
|
||||
|
||||
/**
|
||||
* Guest user ID.
|
||||
*/
|
||||
@@ -38,7 +43,7 @@ public class Constants {
|
||||
/**
|
||||
* Supported document languages.
|
||||
*/
|
||||
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld");
|
||||
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces");
|
||||
|
||||
/**
|
||||
* Base URL environment variable.
|
||||
@@ -73,6 +78,11 @@ public class Constants {
|
||||
*/
|
||||
public static final String ADMIN_EMAIL_INIT_ENV = "DOCS_ADMIN_EMAIL_INIT";
|
||||
|
||||
/**
|
||||
* Work factor to be used by Bcrypt
|
||||
*/
|
||||
public static final String BCRYPT_WORK_ENV = "DOCS_BCRYPT_WORK";
|
||||
|
||||
/**
|
||||
* Expiration time of the password recovery in hours.
|
||||
*/
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
package com.sismics.docs.core.constant;
|
||||
|
||||
/**
|
||||
* Metadata type.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
public enum MetadataType {
|
||||
STRING,
|
||||
INTEGER,
|
||||
FLOAT,
|
||||
DATE,
|
||||
BOOLEAN
|
||||
}
|
||||
@@ -128,6 +128,9 @@ public class AclDao {
|
||||
if (SecurityUtil.skipAclCheck(targetIdList)) {
|
||||
return true;
|
||||
}
|
||||
if (targetIdList.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
StringBuilder sb = new StringBuilder("select a.ACL_ID_C from T_ACL a ");
|
||||
|
||||
@@ -61,6 +61,7 @@ public class AuditLogDao {
|
||||
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select f.FIL_ID_C from T_FILE f where f.FIL_IDDOC_C = :documentId) ");
|
||||
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select c.COM_ID_C from T_COMMENT c where c.COM_IDDOC_C = :documentId) ");
|
||||
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select a.ACL_ID_C from T_ACL a where a.ACL_SOURCEID_C = :documentId) ");
|
||||
queries.add(baseQuery + " where l.LOG_IDENTITY_C in (select r.RTE_ID_C from T_ROUTE r where r.RTE_IDDOCUMENT_C = :documentId) ");
|
||||
parameterMap.put("documentId", criteria.getDocumentId());
|
||||
}
|
||||
|
||||
|
||||
@@ -27,7 +27,6 @@ public class CommentDao {
|
||||
* @param comment Comment
|
||||
* @param userId User ID
|
||||
* @return New ID
|
||||
* @throws Exception
|
||||
*/
|
||||
public String create(Comment comment, String userId) {
|
||||
// Create the UUID
|
||||
@@ -99,7 +98,7 @@ public class CommentDao {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Object[]> l = q.getResultList();
|
||||
|
||||
List<CommentDto> commentDtoList = new ArrayList<CommentDto>();
|
||||
List<CommentDto> commentDtoList = new ArrayList<>();
|
||||
for (Object[] o : l) {
|
||||
int i = 0;
|
||||
CommentDto commentDto = new CommentDto();
|
||||
@@ -107,7 +106,7 @@ public class CommentDao {
|
||||
commentDto.setContent((String) o[i++]);
|
||||
commentDto.setCreateTimestamp(((Timestamp) o[i++]).getTime());
|
||||
commentDto.setCreatorName((String) o[i++]);
|
||||
commentDto.setCreatorEmail((String) o[i++]);
|
||||
commentDto.setCreatorEmail((String) o[i]);
|
||||
commentDtoList.add(commentDto);
|
||||
}
|
||||
return commentDtoList;
|
||||
|
||||
@@ -56,7 +56,7 @@ public class ContributorDao {
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<ContributorDto> getByDocumentId(String documentId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
StringBuilder sb = new StringBuilder("select u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c ");
|
||||
StringBuilder sb = new StringBuilder("select distinct u.USE_USERNAME_C, u.USE_EMAIL_C from T_CONTRIBUTOR c ");
|
||||
sb.append(" join T_USER u on u.USE_ID_C = c.CTR_IDUSER_C ");
|
||||
sb.append(" where c.CTR_IDDOC_C = :documentId ");
|
||||
Query q = em.createNativeQuery(sb.toString());
|
||||
|
||||
@@ -10,6 +10,7 @@ import com.sismics.util.context.ThreadLocalContext;
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.NoResultException;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.TypedQuery;
|
||||
import java.sql.Timestamp;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
@@ -50,10 +51,9 @@ public class DocumentDao {
|
||||
* @param limit Limit
|
||||
* @return List of documents
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<Document> findAll(int offset, int limit) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select d from Document d where d.deleteDate is null");
|
||||
TypedQuery<Document> q = em.createQuery("select d from Document d where d.deleteDate is null", Document.class);
|
||||
q.setFirstResult(offset);
|
||||
q.setMaxResults(limit);
|
||||
return q.getResultList();
|
||||
@@ -65,10 +65,9 @@ public class DocumentDao {
|
||||
* @param userId User ID
|
||||
* @return List of documents
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<Document> findByUserId(String userId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null");
|
||||
TypedQuery<Document> q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null", Document.class);
|
||||
q.setParameter("userId", userId);
|
||||
return q.getResultList();
|
||||
}
|
||||
@@ -138,16 +137,16 @@ public class DocumentDao {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the document
|
||||
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
|
||||
q.setParameter("id", id);
|
||||
Document documentDb = (Document) q.getSingleResult();
|
||||
TypedQuery<Document> dq = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
|
||||
dq.setParameter("id", id);
|
||||
Document documentDb = dq.getSingleResult();
|
||||
|
||||
// Delete the document
|
||||
Date dateNow = new Date();
|
||||
documentDb.setDeleteDate(dateNow);
|
||||
|
||||
// Delete linked data
|
||||
q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
|
||||
Query q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
|
||||
q.setParameter("documentId", id);
|
||||
q.setParameter("dateNow", dateNow);
|
||||
q.executeUpdate();
|
||||
@@ -179,10 +178,10 @@ public class DocumentDao {
|
||||
*/
|
||||
public Document getById(String id) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
|
||||
TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
|
||||
q.setParameter("id", id);
|
||||
try {
|
||||
return (Document) q.getSingleResult();
|
||||
return q.getSingleResult();
|
||||
} catch (NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
@@ -196,27 +195,12 @@ public class DocumentDao {
|
||||
* @return Updated document
|
||||
*/
|
||||
public Document update(Document document, String userId) {
|
||||
Document documentDb = updateSilently(document);
|
||||
|
||||
// Create audit log
|
||||
AuditLogUtil.create(documentDb, AuditLogType.UPDATE, userId);
|
||||
|
||||
return documentDb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a document without audit log.
|
||||
*
|
||||
* @param document Document to update
|
||||
* @return Updated document
|
||||
*/
|
||||
public Document updateSilently(Document document) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the document
|
||||
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
|
||||
TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
|
||||
q.setParameter("id", document.getId());
|
||||
Document documentDb = (Document) q.getSingleResult();
|
||||
Document documentDb = q.getSingleResult();
|
||||
|
||||
// Update the document
|
||||
documentDb.setTitle(document.getTitle());
|
||||
@@ -233,10 +217,27 @@ public class DocumentDao {
|
||||
documentDb.setLanguage(document.getLanguage());
|
||||
documentDb.setFileId(document.getFileId());
|
||||
documentDb.setUpdateDate(new Date());
|
||||
|
||||
|
||||
// Create audit log
|
||||
AuditLogUtil.create(documentDb, AuditLogType.UPDATE, userId);
|
||||
|
||||
return documentDb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the file ID on a document.
|
||||
*
|
||||
* @param document Document
|
||||
*/
|
||||
public void updateFileId(Document document) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query query = em.createNativeQuery("update T_DOCUMENT d set DOC_IDFILE_C = :fileId, DOC_UPDATEDATE_D = :updateDate where d.DOC_ID_C = :id");
|
||||
query.setParameter("updateDate", new Date());
|
||||
query.setParameter("fileId", document.getFileId());
|
||||
query.setParameter("id", document.getId());
|
||||
query.executeUpdate();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the number of documents.
|
||||
*
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
package com.sismics.docs.core.dao;
|
||||
|
||||
import com.sismics.docs.core.constant.MetadataType;
|
||||
import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
|
||||
import com.sismics.docs.core.model.jpa.DocumentMetadata;
|
||||
import com.sismics.util.context.ThreadLocalContext;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.Query;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Document metadata DAO.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
public class DocumentMetadataDao {
|
||||
/**
|
||||
* Creates a new document metadata.
|
||||
*
|
||||
* @param documentMetadata Document metadata
|
||||
* @return New ID
|
||||
*/
|
||||
public String create(DocumentMetadata documentMetadata) {
|
||||
// Create the UUID
|
||||
documentMetadata.setId(UUID.randomUUID().toString());
|
||||
|
||||
// Create the document metadata
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
em.persist(documentMetadata);
|
||||
|
||||
return documentMetadata.getId();
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates a document metadata.
|
||||
*
|
||||
* @param documentMetadata Document metadata
|
||||
* @return Updated document metadata
|
||||
*/
|
||||
public DocumentMetadata update(DocumentMetadata documentMetadata) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the document metadata
|
||||
Query q = em.createQuery("select u from DocumentMetadata u where u.id = :id");
|
||||
q.setParameter("id", documentMetadata.getId());
|
||||
DocumentMetadata documentMetadataDb = (DocumentMetadata) q.getSingleResult();
|
||||
|
||||
// Update the document metadata
|
||||
documentMetadataDb.setValue(documentMetadata.getValue());
|
||||
|
||||
return documentMetadata;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of all metadata values on a document.
|
||||
*
|
||||
* @param documentId Document ID
|
||||
* @return List of metadata
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<DocumentMetadataDto> getByDocumentId(String documentId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
StringBuilder sb = new StringBuilder("select dm.DME_ID_C, dm.DME_IDDOCUMENT_C, dm.DME_IDMETADATA_C, dm.DME_VALUE_C, m.MET_TYPE_C");
|
||||
sb.append(" from T_DOCUMENT_METADATA dm, T_METADATA m ");
|
||||
sb.append(" where dm.DME_IDMETADATA_C = m.MET_ID_C and dm.DME_IDDOCUMENT_C = :documentId and m.MET_DELETEDATE_D is null");
|
||||
|
||||
// Perform the search
|
||||
Query q = em.createNativeQuery(sb.toString());
|
||||
q.setParameter("documentId", documentId);
|
||||
List<Object[]> l = q.getResultList();
|
||||
|
||||
// Assemble results
|
||||
List<DocumentMetadataDto> dtoList = new ArrayList<>();
|
||||
for (Object[] o : l) {
|
||||
int i = 0;
|
||||
DocumentMetadataDto dto = new DocumentMetadataDto();
|
||||
dto.setId((String) o[i++]);
|
||||
dto.setDocumentId((String) o[i++]);
|
||||
dto.setMetadataId((String) o[i++]);
|
||||
dto.setValue((String) o[i++]);
|
||||
dto.setType(MetadataType.valueOf((String) o[i]));
|
||||
dtoList.add(dto);
|
||||
}
|
||||
return dtoList;
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,8 @@ import com.sismics.util.context.ThreadLocalContext;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.NoResultException;
|
||||
import javax.persistence.Query;
|
||||
import javax.persistence.TypedQuery;
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
@@ -47,10 +48,9 @@ public class FileDao {
|
||||
* @param limit Limit
|
||||
* @return List of files
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<File> findAll(int offset, int limit) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select f from File f where f.deleteDate is null");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.deleteDate is null", File.class);
|
||||
q.setFirstResult(offset);
|
||||
q.setMaxResults(limit);
|
||||
return q.getResultList();
|
||||
@@ -62,28 +62,38 @@ public class FileDao {
|
||||
* @param userId User ID
|
||||
* @return List of files
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<File> findByUserId(String userId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null", File.class);
|
||||
q.setParameter("userId", userId);
|
||||
return q.getResultList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of active files.
|
||||
*
|
||||
* @param ids Files IDs
|
||||
* @return List of files
|
||||
*/
|
||||
public List<File> getFiles(List<String> ids) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.id in :ids and f.deleteDate is null", File.class);
|
||||
q.setParameter("ids", ids);
|
||||
return q.getResultList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns an active file.
|
||||
* Returns an active file or null.
|
||||
*
|
||||
* @param id File ID
|
||||
* @return Document
|
||||
* @return File
|
||||
*/
|
||||
public File getFile(String id) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
|
||||
q.setParameter("id", id);
|
||||
try {
|
||||
return (File) q.getSingleResult();
|
||||
} catch (NoResultException e) {
|
||||
List<File> files = getFiles(List.of(id));
|
||||
if (files.isEmpty()) {
|
||||
return null;
|
||||
} else {
|
||||
return files.get(0);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -92,15 +102,15 @@ public class FileDao {
|
||||
*
|
||||
* @param id File ID
|
||||
* @param userId User ID
|
||||
* @return Document
|
||||
* @return File
|
||||
*/
|
||||
public File getFile(String id, String userId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null", File.class);
|
||||
q.setParameter("id", id);
|
||||
q.setParameter("userId", userId);
|
||||
try {
|
||||
return (File) q.getSingleResult();
|
||||
return q.getSingleResult();
|
||||
} catch (NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
@@ -116,9 +126,9 @@ public class FileDao {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the file
|
||||
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
|
||||
q.setParameter("id", id);
|
||||
File fileDb = (File) q.getSingleResult();
|
||||
File fileDb = q.getSingleResult();
|
||||
|
||||
// Delete the file
|
||||
Date dateNow = new Date();
|
||||
@@ -138,9 +148,9 @@ public class FileDao {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the file
|
||||
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
|
||||
q.setParameter("id", file.getId());
|
||||
File fileDb = (File) q.getSingleResult();
|
||||
File fileDb = q.getSingleResult();
|
||||
|
||||
// Update the file
|
||||
fileDb.setDocumentId(file.getDocumentId());
|
||||
@@ -153,7 +163,7 @@ public class FileDao {
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Gets a file by its ID.
|
||||
*
|
||||
@@ -162,32 +172,43 @@ public class FileDao {
|
||||
*/
|
||||
public File getActiveById(String id) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
|
||||
q.setParameter("id", id);
|
||||
try {
|
||||
return (File) q.getSingleResult();
|
||||
return q.getSingleResult();
|
||||
} catch (NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get files by document ID or all orphan files of an user.
|
||||
* Get files by document ID or all orphan files of a user.
|
||||
*
|
||||
* @param userId User ID
|
||||
* @param documentId Document ID
|
||||
* @return List of files
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<File> getByDocumentId(String userId, String documentId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
if (documentId == null) {
|
||||
Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc", File.class);
|
||||
q.setParameter("userId", userId);
|
||||
return q.getResultList();
|
||||
} else {
|
||||
return getByDocumentsIds(Collections.singleton(documentId));
|
||||
}
|
||||
Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc");
|
||||
q.setParameter("documentId", documentId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get files by documents IDs.
|
||||
*
|
||||
* @param documentIds Documents IDs
|
||||
* @return List of files
|
||||
*/
|
||||
public List<File> getByDocumentsIds(Iterable<String> documentIds) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null order by f.order asc", File.class);
|
||||
q.setParameter("documentIds", documentIds);
|
||||
return q.getResultList();
|
||||
}
|
||||
|
||||
@@ -197,10 +218,9 @@ public class FileDao {
|
||||
* @param versionId Version ID
|
||||
* @return List of files
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<File> getByVersionId(String versionId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc");
|
||||
TypedQuery<File> q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc", File.class);
|
||||
q.setParameter("versionId", versionId);
|
||||
return q.getResultList();
|
||||
}
|
||||
|
||||
@@ -183,12 +183,10 @@ public class GroupDao {
|
||||
}
|
||||
|
||||
criteriaList.add("g.GRP_DELETEDATE_D is null");
|
||||
|
||||
if (!criteriaList.isEmpty()) {
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
}
|
||||
|
||||
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
@SuppressWarnings("unchecked")
|
||||
|
||||
@@ -0,0 +1,146 @@
|
||||
package com.sismics.docs.core.dao;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import com.sismics.docs.core.constant.AuditLogType;
|
||||
import com.sismics.docs.core.constant.MetadataType;
|
||||
import com.sismics.docs.core.dao.criteria.MetadataCriteria;
|
||||
import com.sismics.docs.core.dao.dto.MetadataDto;
|
||||
import com.sismics.docs.core.model.jpa.Metadata;
|
||||
import com.sismics.docs.core.util.AuditLogUtil;
|
||||
import com.sismics.docs.core.util.jpa.QueryParam;
|
||||
import com.sismics.docs.core.util.jpa.QueryUtil;
|
||||
import com.sismics.docs.core.util.jpa.SortCriteria;
|
||||
import com.sismics.util.context.ThreadLocalContext;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.NoResultException;
|
||||
import javax.persistence.Query;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* Metadata DAO.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
public class MetadataDao {
|
||||
/**
|
||||
* Creates a new metdata.
|
||||
*
|
||||
* @param metadata Metadata
|
||||
* @param userId User ID
|
||||
* @return New ID
|
||||
*/
|
||||
public String create(Metadata metadata, String userId) {
|
||||
// Create the UUID
|
||||
metadata.setId(UUID.randomUUID().toString());
|
||||
|
||||
// Create the metadata
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
em.persist(metadata);
|
||||
|
||||
// Create audit log
|
||||
AuditLogUtil.create(metadata, AuditLogType.CREATE, userId);
|
||||
|
||||
return metadata.getId();
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a metadata.
|
||||
*
|
||||
* @param metadata Metadata to update
|
||||
* @param userId User ID
|
||||
* @return Updated metadata
|
||||
*/
|
||||
public Metadata update(Metadata metadata, String userId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the metadata
|
||||
Query q = em.createQuery("select r from Metadata r where r.id = :id and r.deleteDate is null");
|
||||
q.setParameter("id", metadata.getId());
|
||||
Metadata metadataDb = (Metadata) q.getSingleResult();
|
||||
|
||||
// Update the metadata
|
||||
metadataDb.setName(metadata.getName());
|
||||
|
||||
// Create audit log
|
||||
AuditLogUtil.create(metadataDb, AuditLogType.UPDATE, userId);
|
||||
|
||||
return metadataDb;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets an active metadata by its ID.
|
||||
*
|
||||
* @param id Metadata ID
|
||||
* @return Metadata
|
||||
*/
|
||||
public Metadata getActiveById(String id) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
try {
|
||||
Query q = em.createQuery("select r from Metadata r where r.id = :id and r.deleteDate is null");
|
||||
q.setParameter("id", id);
|
||||
return (Metadata) q.getSingleResult();
|
||||
} catch (NoResultException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a metadata.
|
||||
*
|
||||
* @param id Metadata ID
|
||||
* @param userId User ID
|
||||
*/
|
||||
public void delete(String id, String userId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the metadata
|
||||
Query q = em.createQuery("select r from Metadata r where r.id = :id and r.deleteDate is null");
|
||||
q.setParameter("id", id);
|
||||
Metadata metadataDb = (Metadata) q.getSingleResult();
|
||||
|
||||
// Delete the metadata
|
||||
Date dateNow = new Date();
|
||||
metadataDb.setDeleteDate(dateNow);
|
||||
|
||||
// Create audit log
|
||||
AuditLogUtil.create(metadataDb, AuditLogType.DELETE, userId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of all metadata.
|
||||
*
|
||||
* @param criteria Search criteria
|
||||
* @param sortCriteria Sort criteria
|
||||
* @return List of metadata
|
||||
*/
|
||||
public List<MetadataDto> findByCriteria(MetadataCriteria criteria, SortCriteria sortCriteria) {
|
||||
Map<String, Object> parameterMap = new HashMap<>();
|
||||
List<String> criteriaList = new ArrayList<>();
|
||||
|
||||
StringBuilder sb = new StringBuilder("select m.MET_ID_C c0, m.MET_NAME_C c1, m.MET_TYPE_C c2");
|
||||
sb.append(" from T_METADATA m ");
|
||||
|
||||
criteriaList.add("m.MET_DELETEDATE_D is null");
|
||||
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
@SuppressWarnings("unchecked")
|
||||
List<Object[]> l = QueryUtil.getNativeQuery(queryParam).getResultList();
|
||||
|
||||
// Assemble results
|
||||
List<MetadataDto> dtoList = new ArrayList<>();
|
||||
for (Object[] o : l) {
|
||||
int i = 0;
|
||||
MetadataDto dto = new MetadataDto();
|
||||
dto.setId((String) o[i++]);
|
||||
dto.setName((String) o[i++]);
|
||||
dto.setType(MetadataType.valueOf((String) o[i]));
|
||||
dtoList.add(dto);
|
||||
}
|
||||
return dtoList;
|
||||
}
|
||||
}
|
||||
@@ -36,13 +36,13 @@ public class RelationDao {
|
||||
List<Object[]> l = q.getResultList();
|
||||
|
||||
// Assemble results
|
||||
List<RelationDto> relationDtoList = new ArrayList<RelationDto>();
|
||||
List<RelationDto> relationDtoList = new ArrayList<>();
|
||||
for (Object[] o : l) {
|
||||
int i = 0;
|
||||
RelationDto relationDto = new RelationDto();
|
||||
relationDto.setId((String) o[i++]);
|
||||
relationDto.setTitle((String) o[i++]);
|
||||
String fromDocId = (String) o[i++];
|
||||
String fromDocId = (String) o[i];
|
||||
relationDto.setSource(documentId.equals(fromDocId));
|
||||
relationDtoList.add(relationDto);
|
||||
}
|
||||
|
||||
@@ -64,10 +64,8 @@ public class RouteDao {
|
||||
}
|
||||
criteriaList.add("r.RTE_DELETEDATE_D is null");
|
||||
|
||||
if (!criteriaList.isEmpty()) {
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
}
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
@@ -91,10 +89,15 @@ public class RouteDao {
|
||||
* Deletes a route and the associated steps.
|
||||
*
|
||||
* @param routeId Route ID
|
||||
* @param userId User ID
|
||||
*/
|
||||
public void deleteRoute(String routeId) {
|
||||
public void deleteRoute(String routeId, String userId) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Create audit log
|
||||
Route route = em.find(Route.class, routeId);
|
||||
AuditLogUtil.create(route, AuditLogType.DELETE, userId);
|
||||
|
||||
em.createNativeQuery("update T_ROUTE_STEP rs set RTP_DELETEDATE_D = :dateNow where rs.RTP_IDROUTE_C = :routeId and rs.RTP_DELETEDATE_D is null")
|
||||
.setParameter("routeId", routeId)
|
||||
.setParameter("dateNow", new Date())
|
||||
|
||||
@@ -61,7 +61,7 @@ public class RouteModelDao {
|
||||
q.setParameter("id", routeModel.getId());
|
||||
RouteModel routeModelDb = (RouteModel) q.getSingleResult();
|
||||
|
||||
// Update the group
|
||||
// Update the route model
|
||||
routeModelDb.setName(routeModel.getName());
|
||||
routeModelDb.setSteps(routeModel.getSteps());
|
||||
|
||||
@@ -88,6 +88,18 @@ public class RouteModelDao {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the list of all route models.
|
||||
*
|
||||
* @return List of route models
|
||||
*/
|
||||
@SuppressWarnings("unchecked")
|
||||
public List<RouteModel> findAll() {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
Query q = em.createQuery("select r from RouteModel r where r.deleteDate is null");
|
||||
return q.getResultList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Deletes a route model.
|
||||
*
|
||||
@@ -133,10 +145,8 @@ public class RouteModelDao {
|
||||
|
||||
criteriaList.add("rm.RTM_DELETEDATE_D is null");
|
||||
|
||||
if (!criteriaList.isEmpty()) {
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
}
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
|
||||
@@ -90,10 +90,8 @@ public class RouteStepDao {
|
||||
}
|
||||
criteriaList.add("rs.RTP_DELETEDATE_D is null");
|
||||
|
||||
if (!criteriaList.isEmpty()) {
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
}
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
|
||||
@@ -19,7 +19,6 @@ public class ShareDao {
|
||||
*
|
||||
* @param share Share
|
||||
* @return New ID
|
||||
* @throws Exception
|
||||
*/
|
||||
public String create(Share share) {
|
||||
// Create the UUID
|
||||
|
||||
@@ -199,10 +199,8 @@ public class TagDao {
|
||||
|
||||
criteriaList.add("t.TAG_DELETEDATE_D is null");
|
||||
|
||||
if (!criteriaList.isEmpty()) {
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
}
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
|
||||
@@ -1,7 +1,14 @@
|
||||
package com.sismics.docs.core.dao;
|
||||
|
||||
import com.google.common.base.Joiner;
|
||||
import com.google.common.base.Strings;
|
||||
import at.favre.lib.crypto.bcrypt.BCrypt;
|
||||
import org.joda.time.DateTime;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import com.sismics.docs.core.constant.AuditLogType;
|
||||
import com.sismics.docs.core.constant.Constants;
|
||||
import com.sismics.docs.core.dao.criteria.UserCriteria;
|
||||
import com.sismics.docs.core.dao.dto.UserDto;
|
||||
import com.sismics.docs.core.model.jpa.User;
|
||||
@@ -11,8 +18,6 @@ import com.sismics.docs.core.util.jpa.QueryParam;
|
||||
import com.sismics.docs.core.util.jpa.QueryUtil;
|
||||
import com.sismics.docs.core.util.jpa.SortCriteria;
|
||||
import com.sismics.util.context.ThreadLocalContext;
|
||||
import org.joda.time.DateTime;
|
||||
import org.mindrot.jbcrypt.BCrypt;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import javax.persistence.NoResultException;
|
||||
@@ -26,6 +31,11 @@ import java.util.*;
|
||||
* @author jtremeaux
|
||||
*/
|
||||
public class UserDao {
|
||||
/**
|
||||
* Logger.
|
||||
*/
|
||||
private static final Logger log = LoggerFactory.getLogger(UserDao.class);
|
||||
|
||||
/**
|
||||
* Authenticates an user.
|
||||
*
|
||||
@@ -39,7 +49,8 @@ public class UserDao {
|
||||
q.setParameter("username", username);
|
||||
try {
|
||||
User user = (User) q.getSingleResult();
|
||||
if (!BCrypt.checkpw(password, user.getPassword()) || user.getDisableDate() != null) {
|
||||
BCrypt.Result result = BCrypt.verifyer().verify(password.toCharArray(), user.getPassword());
|
||||
if (!result.verified || user.getDisableDate() != null) {
|
||||
return null;
|
||||
}
|
||||
return user;
|
||||
@@ -171,6 +182,26 @@ public class UserDao {
|
||||
return user;
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the onboarding status.
|
||||
*
|
||||
* @param user User to update
|
||||
* @return Updated user
|
||||
*/
|
||||
public User updateOnboarding(User user) {
|
||||
EntityManager em = ThreadLocalContext.get().getEntityManager();
|
||||
|
||||
// Get the user
|
||||
Query q = em.createQuery("select u from User u where u.id = :id and u.deleteDate is null");
|
||||
q.setParameter("id", user.getId());
|
||||
User userDb = (User) q.getSingleResult();
|
||||
|
||||
// Update the user
|
||||
userDb.setOnboarding(user.isOnboarding());
|
||||
|
||||
return user;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a user by its ID.
|
||||
*
|
||||
@@ -257,7 +288,21 @@ public class UserDao {
|
||||
* @return Hashed password
|
||||
*/
|
||||
private String hashPassword(String password) {
|
||||
return BCrypt.hashpw(password, BCrypt.gensalt());
|
||||
int bcryptWork = Constants.DEFAULT_BCRYPT_WORK;
|
||||
String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV);
|
||||
if (!Strings.isNullOrEmpty(envBcryptWork)) {
|
||||
try {
|
||||
int envBcryptWorkInt = Integer.parseInt(envBcryptWork);
|
||||
if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) {
|
||||
bcryptWork = envBcryptWorkInt;
|
||||
} else {
|
||||
log.warn(Constants.BCRYPT_WORK_ENV + " needs to be in range 4...31. Falling back to " + Constants.DEFAULT_BCRYPT_WORK + ".");
|
||||
}
|
||||
} catch (NumberFormatException e) {
|
||||
log.warn(Constants.BCRYPT_WORK_ENV + " needs to be a number in range 4...31. Falling back to " + Constants.DEFAULT_BCRYPT_WORK + ".");
|
||||
}
|
||||
}
|
||||
return BCrypt.withDefaults().hashToString(bcryptWork, password.toCharArray());
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -20,7 +20,6 @@ public class VocabularyDao {
|
||||
*
|
||||
* @param vocabulary Vocabulary
|
||||
* @return New ID
|
||||
* @throws Exception
|
||||
*/
|
||||
public String create(Vocabulary vocabulary) {
|
||||
// Create the UUID
|
||||
|
||||
@@ -42,11 +42,9 @@ public class WebhookDao {
|
||||
}
|
||||
criteriaList.add("w.WHK_DELETEDATE_D is null");
|
||||
|
||||
if (!criteriaList.isEmpty()) {
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
}
|
||||
|
||||
sb.append(" where ");
|
||||
sb.append(Joiner.on(" and ").join(criteriaList));
|
||||
|
||||
// Perform the search
|
||||
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
|
||||
@SuppressWarnings("unchecked")
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.sismics.docs.core.dao.criteria;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
@@ -49,8 +50,14 @@ public class DocumentCriteria {
|
||||
* Tag IDs.
|
||||
* The first level list will be AND'ed and the second level list will be OR'ed.
|
||||
*/
|
||||
private List<List<String>> tagIdList;
|
||||
private List<List<String>> tagIdList = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Tag IDs to exclude.
|
||||
* The first and second level list will be excluded.
|
||||
*/
|
||||
private List<List<String>> excludedTagIdList = new ArrayList<>();
|
||||
|
||||
/**
|
||||
* Shared status.
|
||||
*/
|
||||
@@ -70,7 +77,17 @@ public class DocumentCriteria {
|
||||
* A route is active.
|
||||
*/
|
||||
private Boolean activeRoute;
|
||||
|
||||
|
||||
/**
|
||||
* MIME type of a file.
|
||||
*/
|
||||
private String mimeType;
|
||||
|
||||
/**
|
||||
* The title.
|
||||
*/
|
||||
private String title;
|
||||
|
||||
public List<String> getTargetIdList() {
|
||||
return targetIdList;
|
||||
}
|
||||
@@ -115,8 +132,8 @@ public class DocumentCriteria {
|
||||
return tagIdList;
|
||||
}
|
||||
|
||||
public void setTagIdList(List<List<String>> tagIdList) {
|
||||
this.tagIdList = tagIdList;
|
||||
public List<List<String>> getExcludedTagIdList() {
|
||||
return excludedTagIdList;
|
||||
}
|
||||
|
||||
public Boolean getShared() {
|
||||
@@ -142,11 +159,7 @@ public class DocumentCriteria {
|
||||
public void setCreatorId(String creatorId) {
|
||||
this.creatorId = creatorId;
|
||||
}
|
||||
|
||||
public Boolean getActiveRoute() {
|
||||
return activeRoute;
|
||||
}
|
||||
|
||||
|
||||
public Date getUpdateDateMin() {
|
||||
return updateDateMin;
|
||||
}
|
||||
@@ -163,7 +176,27 @@ public class DocumentCriteria {
|
||||
this.updateDateMax = updateDateMax;
|
||||
}
|
||||
|
||||
public Boolean getActiveRoute() {
|
||||
return activeRoute;
|
||||
}
|
||||
|
||||
public void setActiveRoute(Boolean activeRoute) {
|
||||
this.activeRoute = activeRoute;
|
||||
}
|
||||
|
||||
public String getMimeType() {
|
||||
return mimeType;
|
||||
}
|
||||
|
||||
public void setMimeType(String mimeType) {
|
||||
this.mimeType = mimeType;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public void setTitle(String title) {
|
||||
this.title = title;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
package com.sismics.docs.core.dao.criteria;
|
||||
|
||||
/**
|
||||
* Metadata criteria.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
public class MetadataCriteria {
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
package com.sismics.docs.core.dao.dto;
|
||||
|
||||
import com.sismics.docs.core.constant.MetadataType;
|
||||
|
||||
/**
|
||||
* Document metadata DTO.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
public class DocumentMetadataDto {
|
||||
/**
|
||||
* Document metadata ID.
|
||||
*/
|
||||
private String id;
|
||||
|
||||
/**
|
||||
* Document ID.
|
||||
*/
|
||||
private String documentId;
|
||||
|
||||
/**
|
||||
* Metadata ID.
|
||||
*/
|
||||
private String metadataId;
|
||||
|
||||
/**
|
||||
* Name.
|
||||
*/
|
||||
private String name;
|
||||
|
||||
/**
|
||||
* Value.
|
||||
*/
|
||||
private String value;
|
||||
|
||||
/**
|
||||
* Type.
|
||||
*/
|
||||
private MetadataType type;
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public DocumentMetadataDto setId(String id) {
|
||||
this.id = id;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public DocumentMetadataDto setName(String name) {
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
public MetadataType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public DocumentMetadataDto setType(MetadataType type) {
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getDocumentId() {
|
||||
return documentId;
|
||||
}
|
||||
|
||||
public DocumentMetadataDto setDocumentId(String documentId) {
|
||||
this.documentId = documentId;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getMetadataId() {
|
||||
return metadataId;
|
||||
}
|
||||
|
||||
public DocumentMetadataDto setMetadataId(String metadataId) {
|
||||
this.metadataId = metadataId;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public DocumentMetadataDto setValue(String value) {
|
||||
this.value = value;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,52 @@
|
||||
package com.sismics.docs.core.dao.dto;
|
||||
|
||||
import com.sismics.docs.core.constant.MetadataType;
|
||||
|
||||
/**
|
||||
* Metadata DTO.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
public class MetadataDto {
|
||||
/**
|
||||
* Metadata ID.
|
||||
*/
|
||||
private String id;
|
||||
|
||||
/**
|
||||
* Name.
|
||||
*/
|
||||
private String name;
|
||||
|
||||
/**
|
||||
* Type.
|
||||
*/
|
||||
private MetadataType type;
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public MetadataDto setId(String id) {
|
||||
this.id = id;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public MetadataDto setName(String name) {
|
||||
this.name = name;
|
||||
return this;
|
||||
}
|
||||
|
||||
public MetadataType getType() {
|
||||
return type;
|
||||
}
|
||||
|
||||
public MetadataDto setType(MetadataType type) {
|
||||
this.type = type;
|
||||
return this;
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
package com.sismics.docs.core.event;
|
||||
|
||||
import com.google.common.base.MoreObjects;
|
||||
import com.sismics.docs.core.model.jpa.Document;
|
||||
|
||||
/**
|
||||
* Document created event.
|
||||
@@ -10,32 +9,22 @@ import com.sismics.docs.core.model.jpa.Document;
|
||||
*/
|
||||
public class DocumentCreatedAsyncEvent extends UserEvent {
|
||||
/**
|
||||
* Created document.
|
||||
* Document ID.
|
||||
*/
|
||||
private Document document;
|
||||
|
||||
/**
|
||||
* Getter of document.
|
||||
*
|
||||
* @return the document
|
||||
*/
|
||||
public Document getDocument() {
|
||||
return document;
|
||||
private String documentId;
|
||||
|
||||
public String getDocumentId() {
|
||||
return documentId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Setter of document.
|
||||
*
|
||||
* @param document document
|
||||
*/
|
||||
public void setDocument(Document document) {
|
||||
this.document = document;
|
||||
public void setDocumentId(String documentId) {
|
||||
this.documentId = documentId;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return MoreObjects.toStringHelper(this)
|
||||
.add("document", document)
|
||||
.toString();
|
||||
.add("documentId", documentId)
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
package com.sismics.docs.core.event;
|
||||
|
||||
import com.google.common.base.MoreObjects;
|
||||
import com.sismics.docs.core.model.jpa.File;
|
||||
|
||||
/**
|
||||
* File deleted event.
|
||||
@@ -10,22 +9,22 @@ import com.sismics.docs.core.model.jpa.File;
|
||||
*/
|
||||
public class FileDeletedAsyncEvent extends UserEvent {
|
||||
/**
|
||||
* Deleted file.
|
||||
* File ID.
|
||||
*/
|
||||
private File file;
|
||||
|
||||
public File getFile() {
|
||||
return file;
|
||||
private String fileId;
|
||||
|
||||
public String getFileId() {
|
||||
return fileId;
|
||||
}
|
||||
|
||||
public void setFile(File file) {
|
||||
this.file = file;
|
||||
public void setFileId(String fileId) {
|
||||
this.fileId = fileId;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return MoreObjects.toStringHelper(this)
|
||||
.add("file", file)
|
||||
.add("fileId", fileId)
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,6 @@
|
||||
package com.sismics.docs.core.event;
|
||||
|
||||
import com.google.common.base.MoreObjects;
|
||||
import com.sismics.docs.core.model.jpa.File;
|
||||
|
||||
import java.nio.file.Path;
|
||||
|
||||
@@ -12,9 +11,9 @@ import java.nio.file.Path;
|
||||
*/
|
||||
public abstract class FileEvent extends UserEvent {
|
||||
/**
|
||||
* Created file.
|
||||
* File ID.
|
||||
*/
|
||||
private File file;
|
||||
private String fileId;
|
||||
|
||||
/**
|
||||
* Language of the file.
|
||||
@@ -25,15 +24,15 @@ public abstract class FileEvent extends UserEvent {
|
||||
* Unencrypted original file.
|
||||
*/
|
||||
private Path unencryptedFile;
|
||||
|
||||
public File getFile() {
|
||||
return file;
|
||||
|
||||
public String getFileId() {
|
||||
return fileId;
|
||||
}
|
||||
|
||||
public void setFile(File file) {
|
||||
this.file = file;
|
||||
public void setFileId(String fileId) {
|
||||
this.fileId = fileId;
|
||||
}
|
||||
|
||||
|
||||
public String getLanguage() {
|
||||
return language;
|
||||
}
|
||||
@@ -54,7 +53,7 @@ public abstract class FileEvent extends UserEvent {
|
||||
@Override
|
||||
public String toString() {
|
||||
return MoreObjects.toStringHelper(this)
|
||||
.add("file", file)
|
||||
.add("fileId", fileId)
|
||||
.add("language", language)
|
||||
.toString();
|
||||
}
|
||||
|
||||
@@ -3,9 +3,11 @@ package com.sismics.docs.core.listener.async;
|
||||
import com.google.common.eventbus.AllowConcurrentEvents;
|
||||
import com.google.common.eventbus.Subscribe;
|
||||
import com.sismics.docs.core.dao.ContributorDao;
|
||||
import com.sismics.docs.core.dao.DocumentDao;
|
||||
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
|
||||
import com.sismics.docs.core.model.context.AppContext;
|
||||
import com.sismics.docs.core.model.jpa.Contributor;
|
||||
import com.sismics.docs.core.model.jpa.Document;
|
||||
import com.sismics.docs.core.util.TransactionUtil;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -34,15 +36,22 @@ public class DocumentCreatedAsyncListener {
|
||||
}
|
||||
|
||||
TransactionUtil.handle(() -> {
|
||||
// Fetch a fresh document
|
||||
Document document = new DocumentDao().getById(event.getDocumentId());
|
||||
if (document == null) {
|
||||
// The document has been deleted since
|
||||
return;
|
||||
}
|
||||
|
||||
// Add the first contributor (the creator of the document)
|
||||
ContributorDao contributorDao = new ContributorDao();
|
||||
Contributor contributor = new Contributor();
|
||||
contributor.setDocumentId(event.getDocument().getId());
|
||||
contributor.setDocumentId(event.getDocumentId());
|
||||
contributor.setUserId(event.getUserId());
|
||||
contributorDao.create(contributor);
|
||||
|
||||
// Update index
|
||||
AppContext.getInstance().getIndexingHandler().createDocument(event.getDocument());
|
||||
AppContext.getInstance().getIndexingHandler().createDocument(document);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,7 +58,7 @@ public class DocumentUpdatedAsyncListener {
|
||||
}
|
||||
|
||||
// Update database and index
|
||||
documentDao.updateSilently(document);
|
||||
documentDao.updateFileId(document);
|
||||
AppContext.getInstance().getIndexingHandler().updateDocument(document);
|
||||
|
||||
// Update contributors list
|
||||
|
||||
@@ -4,7 +4,6 @@ import com.google.common.eventbus.AllowConcurrentEvents;
|
||||
import com.google.common.eventbus.Subscribe;
|
||||
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
|
||||
import com.sismics.docs.core.model.context.AppContext;
|
||||
import com.sismics.docs.core.model.jpa.File;
|
||||
import com.sismics.docs.core.util.FileUtil;
|
||||
import com.sismics.docs.core.util.TransactionUtil;
|
||||
import org.slf4j.Logger;
|
||||
@@ -35,12 +34,11 @@ public class FileDeletedAsyncListener {
|
||||
}
|
||||
|
||||
// Delete the file from storage
|
||||
File file = event.getFile();
|
||||
FileUtil.delete(file);
|
||||
FileUtil.delete(event.getFileId());
|
||||
|
||||
TransactionUtil.handle(() -> {
|
||||
// Update index
|
||||
AppContext.getInstance().getIndexingHandler().deleteDocument(file.getId());
|
||||
AppContext.getInstance().getIndexingHandler().deleteDocument(event.getFileId());
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ import java.io.OutputStream;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.text.MessageFormat;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
/**
|
||||
* Listener on file processing.
|
||||
@@ -52,15 +53,7 @@ public class FileProcessingAsyncListener {
|
||||
log.info("File created event: " + event.toString());
|
||||
}
|
||||
|
||||
TransactionUtil.handle(() -> {
|
||||
// Generate thumbnail, extract content
|
||||
processFile(event);
|
||||
|
||||
// Update index
|
||||
AppContext.getInstance().getIndexingHandler().createFile(event.getFile());
|
||||
});
|
||||
|
||||
FileUtil.endProcessingFile(event.getFile().getId());
|
||||
processFile(event, true);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -71,43 +64,84 @@ public class FileProcessingAsyncListener {
|
||||
@Subscribe
|
||||
@AllowConcurrentEvents
|
||||
public void on(final FileUpdatedAsyncEvent event) {
|
||||
if (log.isInfoEnabled()) {
|
||||
log.info("File updated event: " + event.toString());
|
||||
}
|
||||
log.info("File updated event: " + event.toString());
|
||||
|
||||
TransactionUtil.handle(() -> {
|
||||
// Generate thumbnail, extract content
|
||||
processFile(event);
|
||||
|
||||
// Update index
|
||||
AppContext.getInstance().getIndexingHandler().updateFile(event.getFile());
|
||||
});
|
||||
|
||||
FileUtil.endProcessingFile(event.getFile().getId());
|
||||
processFile(event, false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Process the file (create/update).
|
||||
* Process a file :
|
||||
* Generate thumbnails
|
||||
* Extract and save text content
|
||||
*
|
||||
* @param event File event
|
||||
* @param isFileCreated True if the file was just created
|
||||
*/
|
||||
private void processFile(FileEvent event) {
|
||||
// Find a format handler
|
||||
final File file = event.getFile();
|
||||
FormatHandler formatHandler = FormatHandlerUtil.find(file.getMimeType());
|
||||
if (formatHandler == null) {
|
||||
log.error("Format unhandled: " + file.getMimeType());
|
||||
FileUtil.endProcessingFile(file.getId());
|
||||
private void processFile(FileEvent event, boolean isFileCreated) {
|
||||
AtomicReference<File> file = new AtomicReference<>();
|
||||
AtomicReference<User> user = new AtomicReference<>();
|
||||
|
||||
// Open a first transaction to get what we need to start the processing
|
||||
TransactionUtil.handle(() -> {
|
||||
// Generate thumbnail, extract content
|
||||
file.set(new FileDao().getActiveById(event.getFileId()));
|
||||
if (file.get() == null) {
|
||||
// The file has been deleted since
|
||||
return;
|
||||
}
|
||||
|
||||
// Get the creating user from the database for its private key
|
||||
UserDao userDao = new UserDao();
|
||||
user.set(userDao.getById(file.get().getUserId()));
|
||||
});
|
||||
|
||||
// Process the file outside of a transaction
|
||||
if (user.get() == null || file.get() == null) {
|
||||
// The user or file has been deleted
|
||||
FileUtil.endProcessingFile(event.getFileId());
|
||||
return;
|
||||
}
|
||||
String content = extractContent(event, user.get(), file.get());
|
||||
|
||||
// Get the user from the database
|
||||
UserDao userDao = new UserDao();
|
||||
User user = userDao.getById(event.getUserId());
|
||||
if (user == null) {
|
||||
// The user has been deleted meanwhile
|
||||
FileUtil.endProcessingFile(file.getId());
|
||||
return;
|
||||
// Open a new transaction to save the file content
|
||||
TransactionUtil.handle(() -> {
|
||||
// Save the file to database
|
||||
FileDao fileDao = new FileDao();
|
||||
File freshFile = fileDao.getActiveById(event.getFileId());
|
||||
if (freshFile == null) {
|
||||
// The file has been deleted since the text extraction started, ignore the result
|
||||
return;
|
||||
}
|
||||
|
||||
freshFile.setContent(content);
|
||||
fileDao.update(freshFile);
|
||||
|
||||
// Update index with the updated file
|
||||
if (isFileCreated) {
|
||||
AppContext.getInstance().getIndexingHandler().createFile(freshFile);
|
||||
} else {
|
||||
AppContext.getInstance().getIndexingHandler().updateFile(freshFile);
|
||||
}
|
||||
});
|
||||
|
||||
FileUtil.endProcessingFile(event.getFileId());
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract text content from a file.
|
||||
* This is executed outside of a transaction.
|
||||
*
|
||||
* @param event File event
|
||||
* @param user User whom created the file
|
||||
* @param file Fresh file
|
||||
* @return Text content
|
||||
*/
|
||||
private String extractContent(FileEvent event, User user, File file) {
|
||||
// Find a format handler
|
||||
FormatHandler formatHandler = FormatHandlerUtil.find(file.getMimeType());
|
||||
if (formatHandler == null) {
|
||||
log.info("Format unhandled: " + file.getMimeType());
|
||||
return null;
|
||||
}
|
||||
|
||||
// Generate file variations
|
||||
@@ -132,28 +166,21 @@ public class FileProcessingAsyncListener {
|
||||
ImageUtil.writeJpeg(thumbnail, outputStream);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Unable to generate thumbnails", e);
|
||||
} catch (Throwable e) {
|
||||
log.error("Unable to generate thumbnails for: " + file, e);
|
||||
}
|
||||
|
||||
// Extract text content from the file
|
||||
long startTime = System.currentTimeMillis();
|
||||
String content = null;
|
||||
log.info("Start extracting content from: " + file);
|
||||
try {
|
||||
content = formatHandler.extractContent(event.getLanguage(), event.getUnencryptedFile());
|
||||
} catch (Exception e) {
|
||||
log.error("Error extracting content from: " + event.getFile(), e);
|
||||
} catch (Throwable e) {
|
||||
log.error("Error extracting content from: " + file, e);
|
||||
}
|
||||
log.info(MessageFormat.format("File content extracted in {0}ms", System.currentTimeMillis() - startTime));
|
||||
log.info(MessageFormat.format("File content extracted in {0}ms: " + file.getId(), System.currentTimeMillis() - startTime));
|
||||
|
||||
// Save the file to database
|
||||
FileDao fileDao = new FileDao();
|
||||
if (fileDao.getActiveById(file.getId()) == null) {
|
||||
// The file has been deleted since the text extraction started, ignore the result
|
||||
return;
|
||||
}
|
||||
|
||||
file.setContent(content);
|
||||
fileDao.update(file);
|
||||
return content;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ public class WebhookAsyncListener {
|
||||
@Subscribe
|
||||
@AllowConcurrentEvents
|
||||
public void on(final DocumentCreatedAsyncEvent event) {
|
||||
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocument().getId());
|
||||
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocumentId());
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
@@ -54,19 +54,19 @@ public class WebhookAsyncListener {
|
||||
@Subscribe
|
||||
@AllowConcurrentEvents
|
||||
public void on(final FileCreatedAsyncEvent event) {
|
||||
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFile().getId());
|
||||
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFileId());
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
@AllowConcurrentEvents
|
||||
public void on(final FileUpdatedAsyncEvent event) {
|
||||
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFile().getId());
|
||||
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFileId());
|
||||
}
|
||||
|
||||
@Subscribe
|
||||
@AllowConcurrentEvents
|
||||
public void on(final FileDeletedAsyncEvent event) {
|
||||
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFile().getId());
|
||||
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFileId());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -86,7 +86,7 @@ public class WebhookAsyncListener {
|
||||
}
|
||||
});
|
||||
|
||||
RequestBody body = RequestBody.create(JSON, "{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}");
|
||||
RequestBody body = RequestBody.create("{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}", JSON);
|
||||
|
||||
for (String webhookUrl : webhookUrlList) {
|
||||
Request request = new Request.Builder()
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
package com.sismics.docs.core.model.context;
|
||||
|
||||
import com.google.common.base.Strings;
|
||||
import com.google.common.collect.Lists;
|
||||
import com.google.common.eventbus.AsyncEventBus;
|
||||
import com.google.common.eventbus.EventBus;
|
||||
import com.sismics.docs.core.constant.Constants;
|
||||
import com.sismics.docs.core.dao.UserDao;
|
||||
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
|
||||
import com.sismics.docs.core.listener.async.*;
|
||||
import com.sismics.docs.core.model.jpa.User;
|
||||
import com.sismics.docs.core.service.FileService;
|
||||
@@ -81,7 +81,7 @@ public class AppContext {
|
||||
List<Class<? extends IndexingHandler>> indexingHandlerList = Lists.newArrayList(
|
||||
new ClasspathScanner<IndexingHandler>().findClasses(IndexingHandler.class, "com.sismics.docs.core.util.indexing"));
|
||||
for (Class<? extends IndexingHandler> handlerClass : indexingHandlerList) {
|
||||
IndexingHandler handler = handlerClass.newInstance();
|
||||
IndexingHandler handler = handlerClass.getDeclaredConstructor().newInstance();
|
||||
if (handler.accept()) {
|
||||
indexingHandler = handler;
|
||||
break;
|
||||
@@ -107,7 +107,7 @@ public class AppContext {
|
||||
|
||||
// Change the admin password if needed
|
||||
String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV);
|
||||
if (envAdminPassword != null) {
|
||||
if (!Strings.isNullOrEmpty(envAdminPassword)) {
|
||||
UserDao userDao = new UserDao();
|
||||
User adminUser = userDao.getById("admin");
|
||||
if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) {
|
||||
@@ -118,7 +118,7 @@ public class AppContext {
|
||||
|
||||
// Change the admin email if needed
|
||||
String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV);
|
||||
if (envAdminEmail != null) {
|
||||
if (!Strings.isNullOrEmpty(envAdminEmail)) {
|
||||
UserDao userDao = new UserDao();
|
||||
User adminUser = userDao.getById("admin");
|
||||
if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) {
|
||||
@@ -172,7 +172,8 @@ public class AppContext {
|
||||
if (EnvironmentUtil.isUnitTest()) {
|
||||
return new EventBus();
|
||||
} else {
|
||||
ThreadPoolExecutor executor = new ThreadPoolExecutor(8, 8,
|
||||
int threadCount = Math.max(Runtime.getRuntime().availableProcessors() / 2, 2);
|
||||
ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
|
||||
1L, TimeUnit.MINUTES,
|
||||
new LinkedBlockingQueue<>());
|
||||
asyncExecutorList.add(executor);
|
||||
|
||||
@@ -0,0 +1,91 @@
|
||||
package com.sismics.docs.core.model.jpa;
|
||||
|
||||
import com.google.common.base.MoreObjects;
|
||||
|
||||
import javax.persistence.Column;
|
||||
import javax.persistence.Entity;
|
||||
import javax.persistence.Id;
|
||||
import javax.persistence.Table;
|
||||
import java.io.Serializable;
|
||||
|
||||
/**
|
||||
* Link between a document and a metadata, holding the value.
|
||||
*
|
||||
* @author bgamard
|
||||
*/
|
||||
@Entity
|
||||
@Table(name = "T_DOCUMENT_METADATA")
|
||||
public class DocumentMetadata implements Serializable {
|
||||
/**
|
||||
* Serial version UID.
|
||||
*/
|
||||
private static final long serialVersionUID = 1L;
|
||||
|
||||
/**
|
||||
* Document metadata ID.
|
||||
*/
|
||||
@Id
|
||||
@Column(name = "DME_ID_C", length = 36)
|
||||
private String id;
|
||||
|
||||
/**
|
||||
* Document ID.
|
||||
*/
|
||||
@Column(name = "DME_IDDOCUMENT_C", nullable = false, length = 36)
|
||||
private String documentId;
|
||||
|
||||
/**
|
||||
* Metadata ID.
|
||||
*/
|
||||
@Column(name = "DME_IDMETADATA_C", nullable = false, length = 36)
|
||||
private String metadataId;
|
||||
|
||||
/**
|
||||
* Value.
|
||||
*/
|
||||
@Column(name = "DME_VALUE_C", length = 4000)
|
||||
private String value;
|
||||
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getDocumentId() {
|
||||
return documentId;
|
||||
}
|
||||
|
||||
public void setDocumentId(String documentId) {
|
||||
this.documentId = documentId;
|
||||
}
|
||||
|
||||
public String getMetadataId() {
|
||||
return metadataId;
|
||||
}
|
||||
|
||||
public DocumentMetadata setMetadataId(String metadataId) {
|
||||
this.metadataId = metadataId;
|
||||
return this;
|
||||
}
|
||||
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public DocumentMetadata setValue(String value) {
|
||||
this.value = value;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return MoreObjects.toStringHelper(this)
|
||||
.add("id", id)
|
||||
.add("documentId", documentId)
|
||||
.add("metadataId", metadataId)
|
||||
.toString();
|
||||
}
|
||||
}
|
||||
@@ -49,7 +49,6 @@ public class File implements Loggable {
|
||||
/**
|
||||
* OCR-ized content.
|
||||
*/
|
||||
@Lob
|
||||
@Column(name = "FIL_CONTENT_C")
|
||||
private String content;
|
||||
|
||||
|
||||
@@ -0,0 +1,92 @@
package com.sismics.docs.core.model.jpa;

import com.google.common.base.MoreObjects;
import com.sismics.docs.core.constant.MetadataType;

import javax.persistence.*;
import java.util.Date;

/**
 * Metadata entity.
 *
 * @author bgamard
 */
@Entity
@Table(name = "T_METADATA")
public class Metadata implements Loggable {
/**
 * Metadata ID.
 */
@Id
@Column(name = "MET_ID_C", length = 36)
private String id;

/**
 * Name.
 */
@Column(name = "MET_NAME_C", length = 50, nullable = false)
private String name;

/**
 * Type.
 */
@Column(name = "MET_TYPE_C", length = 20, nullable = false)
@Enumerated(EnumType.STRING)
private MetadataType type;

/**
 * Deletion date.
 */
@Column(name = "MET_DELETEDATE_D")
private Date deleteDate;

public String getId() {
return id;
}

public Metadata setId(String id) {
this.id = id;
return this;
}

public String getName() {
return name;
}

public Metadata setName(String name) {
this.name = name;
return this;
}

public MetadataType getType() {
return type;
}

public Metadata setType(MetadataType type) {
this.type = type;
return this;
}

@Override
public Date getDeleteDate() {
return deleteDate;
}

public void setDeleteDate(Date deleteDate) {
this.deleteDate = deleteDate;
}

@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("id", id)
.add("name", name)
.add("type", type)
.toString();
}

@Override
public String toMessage() {
return name;
}
}
@@ -46,7 +46,13 @@ public class User implements Loggable {
 */
@Column(name = "USE_PRIVATEKEY_C", nullable = false, length = 100)
private String privateKey;

/**
 * False when the user passed the onboarding.
 */
@Column(name = "USE_ONBOARDING_B", nullable = false)
private boolean onboarding;

/**
 * TOTP secret key.
 */
@@ -198,6 +204,15 @@ public class User implements Loggable {
return this;
}

public boolean isOnboarding() {
return onboarding;
}

public User setOnboarding(boolean onboarding) {
this.onboarding = onboarding;
return this;
}

@Override
public String toString() {
return MoreObjects.toStringHelper(this)

@@ -69,13 +69,18 @@ public class FileService extends AbstractScheduledService {
return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS);
}

public Path createTemporaryFile() throws IOException {
return createTemporaryFile(null);
}

/**
 * Create a temporary file.
 *
 * @param name Wanted file name
 * @return New temporary file
 */
public Path createTemporaryFile() throws IOException {
Path path = Files.createTempFile("sismics_docs", null);
public Path createTemporaryFile(String name) throws IOException {
Path path = Files.createTempFile("sismics_docs", name);
referenceSet.add(new TemporaryPathReference(path, referenceQueue));
return path;
}
@@ -85,7 +90,7 @@ public class FileService extends AbstractScheduledService {
 *
 * @author bgamard
 */
class TemporaryPathReference extends PhantomReference<Path> {
static class TemporaryPathReference extends PhantomReference<Path> {
String path;
TemporaryPathReference(Path referent, ReferenceQueue<? super Path> q) {
super(referent, q);

@@ -1,16 +1,21 @@
package com.sismics.docs.core.service;

import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.util.DocumentUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.EmailUtil;
import com.sismics.util.context.ThreadLocalContext;
import org.apache.commons.lang.StringUtils;
@@ -19,9 +24,10 @@ import org.slf4j.LoggerFactory;

import javax.mail.*;
import javax.mail.search.FlagTerm;
import java.util.Date;
import java.util.Properties;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Inbox scanning service.
@@ -79,22 +85,25 @@ public class InboxService extends AbstractScheduledService {
lastSyncDate = new Date();
lastSyncMessageCount = 0;
try {
Map<String, String> tagsNameToId = getAllTags();

inbox = openInbox();
Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
log.info(messages.length + " messages found");
for (Message message : messages) {
importMessage(message);
importMessage(message, tagsNameToId);
lastSyncMessageCount++;
}
} catch (FolderClosedException e) {
// Ignore this, we will just continue importing on the next cycle
} catch (Exception e) {
log.error("Error synching the inbox", e);
log.error("Error syncing the inbox", e);
lastSyncError = e.getMessage();
} finally {
try {
if (inbox != null) {
inbox.close(false);
// The parameter controls if the messages flagged to be deleted, should actually get deleted.
inbox.close(ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED));
inbox.getStore().close();
}
} catch (Exception e) {
@@ -172,7 +181,7 @@ public class InboxService extends AbstractScheduledService {
store.connect(ConfigUtil.getConfigStringValue(ConfigType.INBOX_USERNAME),
ConfigUtil.getConfigStringValue(ConfigType.INBOX_PASSWORD));

Folder inbox = store.getFolder("INBOX");
Folder inbox = store.getFolder(ConfigUtil.getConfigStringValue(ConfigType.INBOX_FOLDER));
inbox.open(Folder.READ_WRITE);
return inbox;
}
@@ -183,7 +192,7 @@ public class InboxService extends AbstractScheduledService {
 * @param message Message
 * @throws Exception e
 */
private void importMessage(Message message) throws Exception {
private void importMessage(Message message, Map<String, String> tags) throws Exception {
log.info("Importing message: " + message.getSubject());

// Parse the mail
@@ -194,12 +203,27 @@ public class InboxService extends AbstractScheduledService {

// Create the document
Document document = new Document();
document.setUserId("admin");
if (mailContent.getSubject() == null) {
document.setTitle("Imported email from EML file");
} else {
document.setTitle(StringUtils.abbreviate(mailContent.getSubject(), 100));
String subject = mailContent.getSubject();
if (subject == null) {
subject = "Imported email from EML file";
}

HashSet<String> tagsFound = new HashSet<>();
if (tags != null) {
Pattern pattern = Pattern.compile("#([^\\s:#]+)");
Matcher matcher = pattern.matcher(subject);
while (matcher.find()) {
if (tags.containsKey(matcher.group(1)) && tags.get(matcher.group(1)) != null) {
tagsFound.add(tags.get(matcher.group(1)));
subject = subject.replaceFirst("#" + matcher.group(1), "");
}
}
log.debug("Tags found: " + String.join(", ", tagsFound));
subject = subject.trim().replaceAll(" +", " ");
}

document.setUserId("admin");
document.setTitle(StringUtils.abbreviate(subject, 100));
document.setDescription(StringUtils.abbreviate(mailContent.getMessage(), 4000));
document.setSubject(StringUtils.abbreviate(mailContent.getSubject(), 500));
document.setFormat("EML");
@@ -220,14 +244,19 @@ public class InboxService extends AbstractScheduledService {
TagDao tagDao = new TagDao();
Tag tag = tagDao.getById(tagId);
if (tag != null) {
tagDao.updateTagList(document.getId(), Sets.newHashSet(tagId));
tagsFound.add(tagId);
}
}

// Update tags
if (!tagsFound.isEmpty()) {
new TagDao().updateTagList(document.getId(), tagsFound);
}

// Raise a document created event
DocumentCreatedAsyncEvent documentCreatedAsyncEvent = new DocumentCreatedAsyncEvent();
documentCreatedAsyncEvent.setUserId("admin");
documentCreatedAsyncEvent.setDocument(document);
documentCreatedAsyncEvent.setDocumentId(document.getId());
ThreadLocalContext.get().addAsyncEvent(documentCreatedAsyncEvent);

// Add files to the document
@@ -235,6 +264,29 @@ public class InboxService extends AbstractScheduledService {
FileUtil.createFile(fileContent.getName(), null, fileContent.getFile(), fileContent.getSize(),
document.getLanguage(), "admin", document.getId());
}

if (ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED)) {
message.setFlag(Flags.Flag.DELETED, true);
}
}

/**
 * Fetches a HashMap with all tag names as keys and their respective ids as values.
 *
 * @return Map with all tags or null if not enabled
 */
private Map<String, String> getAllTags() {
if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
return null;
}
TagDao tagDao = new TagDao();
List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));

Map<String, String> tagsNameToId = new HashMap<>();
for (TagDto tagDto : tags) {
tagsNameToId.put(tagDto.getName(), tagDto.getId());
}
return tagsNameToId;
}

public Date getLastSyncDate() {

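As a quick illustration of the subject parsing introduced above (a sketch, not part of the diff): the pattern #([^\s:#]+) pulls hashtag-style tokens out of the mail subject, each token matching a known tag name is mapped to its ID, and the token is stripped from the resulting title. The example subject below is an assumption.

// Hypothetical, self-contained illustration of the hashtag extraction above.
Pattern pattern = Pattern.compile("#([^\\s:#]+)");
Matcher matcher = pattern.matcher("Invoice #accounting #2019 March");
while (matcher.find()) {
    // Prints "accounting", then "2019"; in the service these are looked up
    // in the tag name-to-ID map returned by getAllTags().
    System.out.println(matcher.group(1));
}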
@@ -50,6 +50,19 @@ public class ConfigUtil {
return Integer.parseInt(value);
}

/**
 * Returns the long value of a configuration parameter.
 *
 * @param configType Type of the configuration parameter
 * @return Long value of the configuration parameter
 * @throws IllegalStateException Configuration parameter undefined
 */
public static long getConfigLongValue(ConfigType configType) {
String value = getConfigStringValue(configType);

return Long.parseLong(value);
}

/**
 * Returns the boolean value of a configuration parameter.
 *

@@ -22,9 +22,9 @@ public class DirectoryUtil {
 */
public static Path getBaseDataDirectory() {
Path baseDataDir = null;
if (StringUtils.isNotBlank(EnvironmentUtil.getDocsHome())) {
if (StringUtils.isNotBlank(EnvironmentUtil.getTeedyHome())) {
// If the docs.home property is set then use it
baseDataDir = Paths.get(EnvironmentUtil.getDocsHome());
baseDataDir = Paths.get(EnvironmentUtil.getTeedyHome());
} else if (EnvironmentUtil.isUnitTest()) {
// For unit testing, use a temporary directory
baseDataDir = Paths.get(System.getProperty("java.io.tmpdir"));

@@ -32,6 +32,7 @@ public class EncryptionUtil {
static {
// Initialize Bouncy Castle provider
Security.insertProviderAt(new BouncyCastleProvider(), 1);
Security.removeProvider("SunRsaSign");
}

/**

@@ -1,6 +1,5 @@
package com.sismics.docs.core.util;

import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
@@ -18,6 +17,8 @@ import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.io.InputStreamReaderThread;
import com.sismics.util.mime.MimeTypeUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
@@ -26,6 +27,7 @@ import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
@@ -36,6 +38,11 @@ import java.util.*;
 * @author bgamard
 */
public class FileUtil {
/**
 * Logger.
 */
private static final Logger log = LoggerFactory.getLogger(FileUtil.class);

/**
 * File ID of files currently being processed.
 */
@@ -69,19 +76,19 @@ public class FileUtil {

// Consume the data as text
try (InputStream is = process.getInputStream()) {
return CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
return CharStreams.toString(new InputStreamReader(is, StandardCharsets.UTF_8));
}
}

/**
 * Remove a file from the storage filesystem.
 *
 * @param file File to delete
 * @param fileId ID of file to delete
 */
public static void delete(File file) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(file.getId());
Path webFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_thumb");
public static void delete(String fileId) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
Path webFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_thumb");

if (Files.exists(storedFile)) {
Files.delete(storedFile);
@@ -126,7 +133,7 @@ public class FileUtil {
// Validate global quota
String globalStorageQuotaStr = System.getenv(Constants.GLOBAL_QUOTA_ENV);
if (!Strings.isNullOrEmpty(globalStorageQuotaStr)) {
long globalStorageQuota = Long.valueOf(globalStorageQuotaStr);
long globalStorageQuota = Long.parseLong(globalStorageQuotaStr);
long globalStorageCurrent = userDao.getGlobalStorageCurrent();
if (globalStorageCurrent + fileSize > globalStorageQuota) {
throw new IOException("QuotaReached");
@@ -190,7 +197,7 @@ public class FileUtil {
FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent();
fileCreatedAsyncEvent.setUserId(userId);
fileCreatedAsyncEvent.setLanguage(language);
fileCreatedAsyncEvent.setFile(file);
fileCreatedAsyncEvent.setFileId(file.getId());
fileCreatedAsyncEvent.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(fileCreatedAsyncEvent);

@@ -211,6 +218,7 @@ public class FileUtil {
 */
public static void startProcessingFile(String fileId) {
processingFileSet.add(fileId);
log.info("Processing started for file: " + fileId);
}

/**
@@ -220,6 +228,7 @@ public class FileUtil {
 */
public static void endProcessingFile(String fileId) {
processingFileSet.remove(fileId);
log.info("Processing ended for file: " + fileId);
}

/**

@@ -0,0 +1,196 @@
package com.sismics.docs.core.util;

import com.google.common.collect.Maps;
import com.sismics.docs.core.constant.MetadataType;
import com.sismics.docs.core.dao.DocumentMetadataDao;
import com.sismics.docs.core.dao.MetadataDao;
import com.sismics.docs.core.dao.criteria.MetadataCriteria;
import com.sismics.docs.core.dao.dto.DocumentMetadataDto;
import com.sismics.docs.core.dao.dto.MetadataDto;
import com.sismics.docs.core.model.jpa.DocumentMetadata;
import com.sismics.docs.core.util.jpa.SortCriteria;

import javax.json.Json;
import javax.json.JsonArrayBuilder;
import javax.json.JsonObjectBuilder;
import java.text.MessageFormat;
import java.util.List;
import java.util.Map;

/**
 * Metadata utilities.
 *
 * @author bgamard
 */
public class MetadataUtil {
/**
 * Update custom metadata on a document.
 *
 * @param documentId Document ID
 * @param metadataIdList Metadata ID list
 * @param metadataValueList Metadata value list
 */
public static void updateMetadata(String documentId, List<String> metadataIdList, List<String> metadataValueList) throws Exception {
if (metadataIdList == null || metadataValueList == null || metadataIdList.isEmpty()) {
return;
}
if (metadataIdList.size() != metadataValueList.size()) {
throw new Exception("metadata_id and metadata_value must have the same length");
}

Map<String, String> newValues = Maps.newHashMap();
for (int i = 0; i < metadataIdList.size(); i++) {
newValues.put(metadataIdList.get(i), metadataValueList.get(i));
}

MetadataDao metadataDao = new MetadataDao();
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
List<MetadataDto> metadataDtoList = metadataDao.findByCriteria(new MetadataCriteria(), null);
List<DocumentMetadataDto> documentMetadataDtoList = documentMetadataDao.getByDocumentId(documentId);

// Update existing values
for (DocumentMetadataDto documentMetadataDto : documentMetadataDtoList) {
if (newValues.containsKey(documentMetadataDto.getMetadataId())) {
// Update the value
String value = newValues.get(documentMetadataDto.getMetadataId());
validateValue(documentMetadataDto.getType(), value);
updateValue(documentMetadataDto.getId(), value);
newValues.remove(documentMetadataDto.getMetadataId());
} else {
// Remove the value
updateValue(documentMetadataDto.getId(), null);
}
}

// Create new values
for (Map.Entry<String, String> entry : newValues.entrySet()) {
// Search the metadata definition
MetadataDto metadata = null;
for (MetadataDto metadataDto : metadataDtoList) {
if (metadataDto.getId().equals(entry.getKey())) {
metadata = metadataDto;
break;
}
}

if (metadata == null) {
throw new Exception(MessageFormat.format("Metadata not found: {0}", entry.getKey()));
}

// Add the value
validateValue(metadata.getType(), entry.getValue());
createValue(documentId, entry.getKey(), entry.getValue());
}
}

/**
 * Validate a custom metadata value.
 *
 * @param type Metadata type
 * @param value Value
 * @throws Exception In case of validation error
 */
private static void validateValue(MetadataType type, String value) throws Exception {
switch (type) {
case STRING:
case BOOLEAN:
return;
case DATE:
try {
Long.parseLong(value);
} catch (NumberFormatException e) {
throw new Exception("Date value not parsable as timestamp");
}
break;
case FLOAT:
try {
Double.parseDouble(value);
} catch (NumberFormatException e) {
throw new Exception("Float value not parsable");
}
break;
case INTEGER:
try {
Integer.parseInt(value);
} catch (NumberFormatException e) {
throw new Exception("Integer value not parsable");
}
break;
}
}

/**
 * Create a custom metadata value on a document.
 *
 * @param documentId Document ID
 * @param metadataId Metadata ID
 * @param value Value
 */
private static void createValue(String documentId, String metadataId, String value) {
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
DocumentMetadata documentMetadata = new DocumentMetadata();
documentMetadata.setDocumentId(documentId);
documentMetadata.setMetadataId(metadataId);
documentMetadata.setValue(value);
documentMetadataDao.create(documentMetadata);
}

/**
 * Update a custom metadata value.
 *
 * @param documentMetadataId Document metadata ID
 * @param value Value
 */
private static void updateValue(String documentMetadataId, String value) {
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
DocumentMetadata documentMetadata = new DocumentMetadata();
documentMetadata.setId(documentMetadataId);
documentMetadata.setValue(value);
documentMetadataDao.update(documentMetadata);
}

/**
 * Add custom metadata to a JSON response.
 *
 * @param json JSON
 * @param documentId Document ID
 */
public static void addMetadata(JsonObjectBuilder json, String documentId) {
DocumentMetadataDao documentMetadataDao = new DocumentMetadataDao();
MetadataDao metadataDao = new MetadataDao();
List<MetadataDto> metadataDtoList = metadataDao.findByCriteria(new MetadataCriteria(), new SortCriteria(1, true));
List<DocumentMetadataDto> documentMetadataDtoList = documentMetadataDao.getByDocumentId(documentId);
JsonArrayBuilder metadata = Json.createArrayBuilder();
for (MetadataDto metadataDto : metadataDtoList) {
JsonObjectBuilder meta = Json.createObjectBuilder()
.add("id", metadataDto.getId())
.add("name", metadataDto.getName())
.add("type", metadataDto.getType().name());
for (DocumentMetadataDto documentMetadataDto : documentMetadataDtoList) {
if (documentMetadataDto.getMetadataId().equals(metadataDto.getId())) {
if (documentMetadataDto.getValue() != null) {
switch (metadataDto.getType()) {
case STRING:
meta.add("value", documentMetadataDto.getValue());
break;
case BOOLEAN:
meta.add("value", Boolean.parseBoolean(documentMetadataDto.getValue()));
break;
case DATE:
meta.add("value", Long.parseLong(documentMetadataDto.getValue()));
break;
case FLOAT:
meta.add("value", Double.parseDouble(documentMetadataDto.getValue()));
break;
case INTEGER:
meta.add("value", Integer.parseInt(documentMetadataDto.getValue()));
break;
}
}
}
}
metadata.add(meta);
}
json.add("metadata", metadata);
}
}
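A minimal sketch of calling the updateMetadata method defined above: the two lists are parallel (same length is enforced), values for metadata missing from the lists are cleared, and each value is validated against its MetadataType. The metadata IDs and values below are assumptions; real IDs come from the T_METADATA table.

// Hypothetical call site for MetadataUtil.updateMetadata as defined above.
List<String> metadataIdList = Arrays.asList("meta-invoice-number", "meta-paid");
List<String> metadataValueList = Arrays.asList("INV-2019-042", "true");
MetadataUtil.updateMetadata(documentId, metadataIdList, metadataValueList);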
@@ -6,6 +6,7 @@ import com.sismics.docs.core.constant.AclType;
import com.sismics.docs.core.constant.PermType;
import com.sismics.docs.core.dao.AclDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.dao.RouteModelDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.RouteStepDto;
@@ -15,8 +16,14 @@ import com.sismics.docs.core.event.RouteStepValidateEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Acl;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.RouteModel;
import com.sismics.util.context.ThreadLocalContext;

import javax.json.Json;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonReader;
import java.io.StringReader;
import java.util.List;

/**
@@ -87,4 +94,31 @@ public class RoutingUtil {
AppContext.getInstance().getMailEventBus().post(routeStepValidateEvent);
}
}

/**
 * Find the first route model name matching a target type and name.
 *
 * @param targetType Target type
 * @param targetName Target name
 * @return Route model name or null if none is matching
 */
public static String findRouteModelNameByTargetName(AclTargetType targetType, String targetName) {
RouteModelDao routeModelDao = new RouteModelDao();
List<RouteModel> routeModelList = routeModelDao.findAll();
for (RouteModel routeModel : routeModelList) {
try (JsonReader reader = Json.createReader(new StringReader(routeModel.getSteps()))) {
JsonArray stepsJson = reader.readArray();
for (int order = 0; order < stepsJson.size(); order++) {
JsonObject step = stepsJson.getJsonObject(order);
JsonObject target = step.getJsonObject("target");
AclTargetType routeTargetType = AclTargetType.valueOf(target.getString("type"));
String routeTargetName = target.getString("name");
if (targetType == routeTargetType && targetName.equals(routeTargetName)) {
return routeModel.getName();
}
}
}
}
return null;
}
}

@@ -1,8 +1,8 @@
package com.sismics.docs.core.util;

import com.google.common.collect.Lists;
import com.sismics.docs.core.dao.dto.TagDto;

import java.util.ArrayList;
import java.util.List;

/**
@@ -12,14 +12,14 @@ import java.util.List;
 */
public class TagUtil {
/**
 * Recursively find children of a tags.
 * Recursively find children of a tag.
 *
 * @param parentTagDto Parent tag
 * @param allTagDtoList List of all tags
 * @return Children tags
 */
public static List<TagDto> findChildren(TagDto parentTagDto, List<TagDto> allTagDtoList) {
List<TagDto> childrenTagDtoList = Lists.newArrayList();
List<TagDto> childrenTagDtoList = new ArrayList<>();

for (TagDto tagDto : allTagDtoList) {
if (parentTagDto.getId().equals(tagDto.getParentId())) {
@@ -32,15 +32,15 @@ public class TagUtil {
}

/**
 * Find tags by name (start with).
 * Find tags by name (start with, ignore case).
 *
 * @param name Name
 * @param allTagDtoList List of all tags
 * @return List of filtered tags
 */
public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) {
List<TagDto> tagDtoList = Lists.newArrayList();
if (name == null || name.isEmpty()) {
List<TagDto> tagDtoList = new ArrayList<>();
if (name.isEmpty()) {
return tagDtoList;
}
name = name.toLowerCase();

@@ -48,7 +48,7 @@ public class ProcessFilesAction implements Action {
FileUpdatedAsyncEvent event = new FileUpdatedAsyncEvent();
event.setUserId("admin");
event.setLanguage(documentDto.getLanguage());
event.setFile(file);
event.setFileId(file.getId());
event.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(event);
}

@@ -20,7 +20,7 @@ public class AuthenticationUtil {

.map(clazz -> {
try {
return clazz.newInstance();
return clazz.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}

@@ -0,0 +1,127 @@
package com.sismics.docs.core.util.authentication;

import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.util.ClasspathScanner;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.DefaultLdapConnectionFactory;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapConnectionPool;
import org.apache.directory.ldap.client.api.ValidatingPoolableLdapConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.UUID;

/**
 * LDAP authentication handler.
 *
 * @author bgamard
 */
@ClasspathScanner.Priority(50) // Before the internal database
public class LdapAuthenticationHandler implements AuthenticationHandler {
/**
 * Logger.
 */
private static final Logger log = LoggerFactory.getLogger(LdapAuthenticationHandler.class);

/**
 * LDAP connection pool.
 */
private static LdapConnectionPool pool;

/**
 * Reset the LDAP pool.
 */
public static void reset() {
if (pool != null) {
try {
pool.close();
} catch (Exception e) {
// NOP
}
}
pool = null;
}

/**
 * Initialize the LDAP pool.
 */
private static void init() {
ConfigDao configDao = new ConfigDao();
Config ldapEnabled = configDao.getById(ConfigType.LDAP_ENABLED);
if (pool != null || ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
return;
}

LdapConnectionConfig config = new LdapConnectionConfig();
config.setLdapHost(ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST));
config.setLdapPort(ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT));
config.setName(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN));
config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));

DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config);
pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), null);
}

@Override
public User authenticate(String username, String password) {
init();
if (pool == null) {
return null;
}

// Fetch and authenticate the user
Entry userEntry;
try {
EntryCursor cursor = pool.getConnection().search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);
if (cursor.next()) {
userEntry = cursor.get();
pool.getConnection().bind(userEntry.getDn(), password);
} else {
// User not found
return null;
}
} catch (Exception e) {
log.error("Error authenticating \"" + username + "\" using the LDAP", e);
return null;
}

UserDao userDao = new UserDao();
User user = userDao.getActiveByUsername(username);
if (user == null) {
// The user is valid but never authenticated, create the user now
log.info("\"" + username + "\" authenticated for the first time, creating the internal user");
user = new User();
user.setRoleId(Constants.DEFAULT_USER_ROLE);
user.setUsername(username);
user.setPassword(UUID.randomUUID().toString()); // No authentication using the internal database
Attribute mailAttribute = userEntry.get("mail");
if (mailAttribute == null || mailAttribute.get() == null) {
user.setEmail(ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL));
} else {
Value value = mailAttribute.get();
user.setEmail(value.getString());
}
user.setStorageQuota(ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE));
try {
userDao.create(user, "admin");
} catch (Exception e) {
log.error("Error while creating the internal user", e);
return null;
}
}

return user;
}
}
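Sketch of how the handler above is driven (an assumption based on the authenticate signature shown; in the application the handler is discovered through ClasspathScanner and tried before the internal database handler rather than instantiated directly):

// Hypothetical direct use of LdapAuthenticationHandler.authenticate as shown above.
AuthenticationHandler handler = new LdapAuthenticationHandler();
User user = handler.authenticate("jdoe", "secret");
if (user == null) {
    // LDAP disabled, user not found, or bad credentials; the next handler is tried.
}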
@@ -3,6 +3,7 @@ package com.sismics.docs.core.util.format;
import com.google.common.collect.Lists;
import com.sismics.util.ClasspathScanner;

import java.lang.reflect.InvocationTargetException;
import java.util.List;

/**
@@ -26,12 +27,12 @@ public class FormatHandlerUtil {
public static FormatHandler find(String mimeType) {
try {
for (Class<? extends FormatHandler> formatHandlerClass : FORMAT_HANDLERS) {
FormatHandler formatHandler = formatHandlerClass.newInstance();
FormatHandler formatHandler = formatHandlerClass.getDeclaredConstructor().newInstance();
if (formatHandler.accept(mimeType)) {
return formatHandler;
}
}
} catch (InstantiationException | IllegalAccessException e) {
} catch (Exception e) {
return null;
}

@@ -6,6 +6,7 @@ import com.sismics.util.mime.MimeType;
import org.apache.pdfbox.io.MemoryUsageSetting;
import org.apache.pdfbox.multipdf.PDFMergerUtility;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.rendering.ImageType;
import org.apache.pdfbox.rendering.PDFRenderer;
import org.apache.pdfbox.text.PDFTextStripper;
import org.slf4j.Logger;
@@ -60,7 +61,7 @@ public class PdfFormatHandler implements FormatHandler {
for (int pageIndex = 0; pageIndex < pdfDocument.getNumberOfPages(); pageIndex++) {
log.info("OCR page " + (pageIndex + 1) + "/" + pdfDocument.getNumberOfPages() + " of PDF file containing only images");
sb.append(" ");
sb.append(FileUtil.ocrFile(language, renderer.renderImage(pageIndex)));
sb.append(FileUtil.ocrFile(language, renderer.renderImageWithDPI(pageIndex, 300, ImageType.GRAY)));
}
return sb.toString();
} catch (Exception e) {

@@ -9,7 +9,7 @@ import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.graphics.image.LosslessFactory;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.xslf.usermodel.XMLSlideShow;
import org.apache.poi.xslf.usermodel.XSLFSlide;

@@ -50,7 +50,7 @@ public class PptxFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
XMLSlideShow pptx = loadPPtxFile(file);
return new XSLFPowerPointExtractor(pptx).getText();
return new SlideShowExtractor<>(pptx).getText();
}

@Override

@@ -11,6 +11,7 @@ import org.apache.pdfbox.pdmodel.PDDocument;

import java.awt.image.BufferedImage;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

@@ -33,7 +34,7 @@ public class TextPlainFormatHandler implements FormatHandler {
PdfWriter.getInstance(output, pdfOutputStream);

output.open();
String content = new String(Files.readAllBytes(file), Charsets.UTF_8);
String content = Files.readString(file, StandardCharsets.UTF_8);
Font font = FontFactory.getFont("LiberationMono-Regular");
Paragraph paragraph = new Paragraph(content, font);
paragraph.setAlignment(Element.ALIGN_LEFT);
@@ -46,7 +47,7 @@ public class TextPlainFormatHandler implements FormatHandler {

@Override
public String extractContent(String language, Path file) throws Exception {
return new String(Files.readAllBytes(file), "UTF-8");
return Files.readString(file, StandardCharsets.UTF_8);
}

@Override

@@ -1,6 +1,5 @@
package com.sismics.docs.core.util.format;

import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
@@ -13,6 +12,7 @@ import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
@@ -65,7 +65,7 @@ public class VideoFormatHandler implements FormatHandler {

// Consume the data as a string
try (InputStream is = process.getInputStream()) {
return new String(ByteStreams.toByteArray(is), Charsets.UTF_8);
return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
} catch (Exception e) {
return null;
}

@@ -37,16 +37,15 @@ import org.apache.lucene.search.spell.LuceneDictionary;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Timestamp;
import java.util.*;

@@ -117,7 +116,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
} else if (luceneStorage.equals("FILE")) {
Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
log.info("Using file Lucene storage: {}", luceneDirectory);
directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
directory = new NIOFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
}

// Create an index writer
@@ -252,7 +251,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
" s.SHA_DELETEDATE_D IS NULL group by ac.ACL_SOURCEID_C) s on s.ACL_SOURCEID_C = d.DOC_ID_C " +
" left join (SELECT count(f.FIL_ID_C) count, f.FIL_IDDOC_C " +
" FROM T_FILE f " +
" WHERE f.FIL_DELETEDATE_D IS NULL group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
" WHERE f.FIL_DELETEDATE_D is null group by f.FIL_IDDOC_C) f on f.FIL_IDDOC_C = d.DOC_ID_C ");
sb.append(" left join (select rs.*, rs3.idDocument " +
"from T_ROUTE_STEP rs " +
"join (select r.RTE_IDDOCUMENT_C idDocument, rs.RTP_IDROUTE_C idRoute, min(rs.RTP_ORDER_N) minOrder from T_ROUTE_STEP rs join T_ROUTE r on r.RTE_ID_C = rs.RTP_IDROUTE_C and r.RTE_DELETEDATE_D is null where rs.RTP_DELETEDATE_D is null and rs.RTP_ENDDATE_D is null group by rs.RTP_IDROUTE_C, r.RTE_IDDOCUMENT_C) rs3 on rs.RTP_IDROUTE_C = rs3.idRoute and rs.RTP_ORDER_N = rs3.minOrder " +
@@ -278,7 +277,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_ID_C in :documentIdList");
parameterMap.put("documentIdList", documentSearchMap.keySet());

suggestSearchTerms(criteria.getSearch(), suggestionList);
suggestSearchTerms(criteria.getFullSearch(), suggestionList);
}
if (criteria.getCreateDateMin() != null) {
criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin");
@@ -296,7 +295,11 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
}
if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
if (criteria.getTitle() != null) {
criteriaList.add("d.DOC_TITLE_C = :title");
parameterMap.put("title", criteria.getTitle());
}
if (!criteria.getTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
@@ -309,9 +312,27 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("(" + Joiner.on(" OR ").join(tagCriteriaList) + ")");
}
}
if (criteria.getExcludedTagIdList() != null && !criteria.getExcludedTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getExcludedTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();
for (String tagId : tagIdList) {
sb.append(String.format("left join T_DOCUMENT_TAG dtex%d on dtex%d.DOT_IDDOCUMENT_C = d.DOC_ID_C and dtex%d.DOT_IDTAG_C = :tagIdEx%d and dtex%d.DOT_DELETEDATE_D is null ", index, index, index, index, index));
parameterMap.put("tagIdEx" + index, tagId);
tagCriteriaList.add(String.format("dtex%d.DOT_ID_C is null", index));
index++;
}
criteriaList.add("(" + Joiner.on(" AND ").join(tagCriteriaList) + ")");
}
}
if (criteria.getShared() != null && criteria.getShared()) {
criteriaList.add("s.count > 0");
}
if (criteria.getMimeType() != null) {
sb.append("left join T_FILE f0 on f0.FIL_IDDOC_C = d.DOC_ID_C and f0.FIL_MIMETYPE_C = :mimeType and f0.FIL_DELETEDATE_D is null");
parameterMap.put("mimeType", criteria.getMimeType());
criteriaList.add("f0.FIL_ID_C is not null");
}
if (criteria.getLanguage() != null) {
criteriaList.add("d.DOC_LANGUAGE_C = :language");
parameterMap.put("language", criteria.getLanguage());
@@ -377,7 +398,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
LuceneDictionary dictionary = new LuceneDictionary(directoryReader, "title");
suggester.build(dictionary);
int lastIndex = search.lastIndexOf(' ');
String suggestQuery = search.substring(lastIndex < 0 ? 0 : lastIndex);
String suggestQuery = search.substring(Math.max(lastIndex, 0));
List<Lookup.LookupResult> lookupResultList = suggester.lookup(suggestQuery, false, 10);
for (Lookup.LookupResult lookupResult : lookupResultList) {
suggestionList.add(lookupResult.key.toString());

@@ -29,6 +29,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
@@ -87,29 +88,34 @@ public class EmailUtil {
try {
// Build email headers
HtmlEmail email = new HtmlEmail();
email.setCharset("UTF-8");
email.setCharset(StandardCharsets.UTF_8.name());
ConfigDao configDao = new ConfigDao();

// Hostname
String envHostname = System.getenv(Constants.SMTP_HOSTNAME_ENV);
if (envHostname == null) {
if (Strings.isNullOrEmpty(envHostname)) {
email.setHostName(ConfigUtil.getConfigStringValue(ConfigType.SMTP_HOSTNAME));
} else {
email.setHostName(envHostname);
}

// Port
int port = ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT);
String envPort = System.getenv(Constants.SMTP_PORT_ENV);
if (envPort == null) {
email.setSmtpPort(ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT));
} else {
email.setSmtpPort(Integer.valueOf(envPort));
if (!Strings.isNullOrEmpty(envPort)) {
port = Integer.valueOf(envPort);
}
email.setSmtpPort(port);
if (port == 465) {
email.setSSLOnConnect(true);
} else if (port == 587) {
email.setStartTLSRequired(true);
}

// Username and password
String envUsername = System.getenv(Constants.SMTP_USERNAME_ENV);
String envPassword = System.getenv(Constants.SMTP_PASSWORD_ENV);
if (envUsername == null || envPassword == null) {
if (Strings.isNullOrEmpty(envUsername) || Strings.isNullOrEmpty(envPassword)) {
Config usernameConfig = configDao.getById(ConfigType.SMTP_USERNAME);
Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
if (usernameConfig != null && passwordConfig != null) {
@@ -124,11 +130,11 @@ public class EmailUtil {

// Application name
Config themeConfig = configDao.getById(ConfigType.THEME);
String appName = "Sismics Docs";
String appName = "Teedy";
if (themeConfig != null) {
try (JsonReader reader = Json.createReader(new StringReader(themeConfig.getValue()))) {
JsonObject themeJson = reader.readObject();
appName = themeJson.getString("name", "Sismics Docs");
appName = themeJson.getString("name", "Teedy");
}
}

@@ -15,7 +15,7 @@ public class EnvironmentUtil {

private static String MAC_OS_USER_HOME = System.getProperty("user.home");

private static String DOCS_HOME = System.getProperty("docs.home");
private static String TEEDY_HOME = System.getProperty("docs.home");

/**
 * In a web application context.
@@ -90,8 +90,8 @@ public class EnvironmentUtil {
 *
 * @return Home directory
 */
public static String getDocsHome() {
return DOCS_HOME;
public static String getTeedyHome() {
return TEEDY_HOME;
}

/**

@@ -1,6 +1,6 @@
package com.sismics.util;

import org.jsoup.helper.StringUtil;
import org.jsoup.internal.StringUtil;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.nodes.TextNode;
@@ -28,7 +28,7 @@ public class HtmlToPlainText {
}

// the formatting rules, implemented in a breadth-first DOM traverse
private class FormattingVisitor implements NodeVisitor {
static private class FormattingVisitor implements NodeVisitor {
private static final int maxWidth = 80;
private int width = 0;
private StringBuilder accum = new StringBuilder(); // holds the accumulated text
@@ -64,7 +64,7 @@ public class HtmlToPlainText {
return; // don't accumulate long runs of empty spaces

if (text.length() + width > maxWidth) { // won't fit, needs to wrap
String words[] = text.split("\\s+");
String[] words = text.split("\\s+");
for (int i = 0; i < words.length; i++) {
String word = words[i];
boolean last = i == words.length - 1;

@@ -1,6 +1,5 @@
package com.sismics.util;

import com.google.common.base.Charsets;
import com.google.common.hash.Hashing;

import javax.imageio.IIOImage;
@@ -13,6 +12,7 @@ import java.awt.image.BufferedImage;
import java.awt.image.WritableRaster;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;

/**
@@ -80,7 +80,7 @@ public class ImageUtil {
}

return Hashing.md5().hashString(
email.trim().toLowerCase(), Charsets.UTF_8)
email.trim().toLowerCase(), StandardCharsets.UTF_8)
.toString();
}

@@ -8,6 +8,7 @@ import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.*;
import java.util.jar.JarEntry;
@@ -53,7 +54,7 @@ public class ResourceUtil {

// Extract the JAR path
String jarPath = dirUrl.getPath().substring(5, dirUrl.getPath().indexOf("!"));
JarFile jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8"));
JarFile jar = new JarFile(URLDecoder.decode(jarPath, StandardCharsets.UTF_8));
Set<String> fileSet = new HashSet<String>();

try {

@@ -48,8 +48,11 @@ public class DialectUtil {
sql = sql.replaceAll("(cached|memory) table", "table");
sql = sql.replaceAll("datetime", "timestamp");
sql = sql.replaceAll("longvarchar", "text");
sql = sql.replaceAll("bit not null", "bool not null");
sql = sql.replaceAll("bit default 1", "bool default true");
sql = sql.replaceAll("bit default 0", "bool default false");
sql = sql.replaceAll("bit not null default 1", "bool not null default true");
sql = sql.replaceAll("bit not null default 0", "bool not null default false");
sql = sql.replaceAll("bit not null", "bool not null");
return sql;
}
}

@@ -1,8 +1,8 @@
package com.sismics.util.jpa;

import com.google.common.base.Strings;
import com.sismics.docs.core.util.DirectoryUtil;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.slf4j.Logger;
@@ -34,7 +34,6 @@ public final class EMF {
try {
properties = getEntityManagerProperties();

Environment.verifyProperties(properties);
ConfigurationHelper.resolvePlaceHolders(properties);
ServiceRegistry reg = new StandardServiceRegistryBuilder().applySettings(properties).build();

@@ -85,10 +84,10 @@ public final class EMF {
Map<Object, Object> props = new HashMap<>();
Path dbDirectory = DirectoryUtil.getDbDirectory();
String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
if (databaseUrl == null) {
if (Strings.isNullOrEmpty(databaseUrl)) {
props.put("hibernate.connection.driver_class", "org.h2.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");
props.put("hibernate.connection.username", "sa");
} else {
props.put("hibernate.connection.driver_class", "org.postgresql.Driver");

@@ -13,7 +13,7 @@ public class MimeType {
public static final String IMAGE_GIF = "image/gif";

public static final String APPLICATION_ZIP = "application/zip";

public static final String APPLICATION_PDF = "application/pdf";

public static final String OPEN_DOCUMENT_TEXT = "application/vnd.oasis.opendocument.text";

@@ -1,15 +1,9 @@
|
||||
package com.sismics.util.mime;
|
||||
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.compress.utils.IOUtils;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLConnection;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
/**
|
||||
* Utility to check MIME types.
|
||||
@@ -18,7 +12,7 @@ import java.util.zip.ZipInputStream;
|
||||
*/
|
||||
public class MimeTypeUtil {
|
||||
/**
|
||||
* Try to guess the MIME type of a file by its magic number (header).
|
||||
* Try to guess the MIME type of a file.
|
||||
*
|
||||
* @param file File to inspect
|
||||
* @param name File name
|
||||
@@ -26,57 +20,17 @@ public class MimeTypeUtil {
|
||||
* @throws IOException e
|
||||
*/
|
||||
public static String guessMimeType(Path file, String name) throws IOException {
|
||||
String mimeType;
|
||||
try (InputStream is = Files.newInputStream(file)) {
|
||||
byte[] headerBytes = new byte[64];
|
||||
is.read(headerBytes);
|
||||
mimeType = guessMimeType(headerBytes, name);
|
||||
String mimeType = Files.probeContentType(file);
|
||||
|
||||
if (mimeType == null && name != null) {
|
||||
mimeType = URLConnection.getFileNameMap().getContentTypeFor(name);
|
||||
}
|
||||
|
||||
return guessOpenDocumentFormat(mimeType, file);
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to guess the MIME type of a file by its magic number (header).
|
||||
*
|
||||
* @param headerBytes File header (first bytes)
|
||||
* @param name File name
|
||||
* @return MIME type
|
||||
* @throws UnsupportedEncodingException e
|
||||
*/
|
||||
public static String guessMimeType(byte[] headerBytes, String name) throws UnsupportedEncodingException {
|
||||
String header = new String(headerBytes, "US-ASCII");
|
||||
|
||||
// Detect by header bytes
|
||||
if (header.startsWith("PK")) {
|
||||
return MimeType.APPLICATION_ZIP;
|
||||
} else if (header.startsWith("GIF87a") || header.startsWith("GIF89a")) {
|
||||
return MimeType.IMAGE_GIF;
|
||||
} else if (headerBytes[0] == ((byte) 0xff) && headerBytes[1] == ((byte) 0xd8)) {
|
||||
return MimeType.IMAGE_JPEG;
|
||||
} else if (headerBytes[0] == ((byte) 0x89) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x4e) && headerBytes[3] == ((byte) 0x47) &&
|
||||
headerBytes[4] == ((byte) 0x0d) && headerBytes[5] == ((byte) 0x0a) && headerBytes[6] == ((byte) 0x1a) && headerBytes[7] == ((byte) 0x0a)) {
|
||||
return MimeType.IMAGE_PNG;
|
||||
} else if (headerBytes[0] == ((byte) 0x25) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x44) && headerBytes[3] == ((byte) 0x46)) {
|
||||
return MimeType.APPLICATION_PDF;
|
||||
} else if (headerBytes[0] == ((byte) 0x00) && headerBytes[1] == ((byte) 0x00) && headerBytes[2] == ((byte) 0x00)
|
||||
&& (headerBytes[3] == ((byte) 0x14) || headerBytes[3] == ((byte) 0x18) || headerBytes[3] == ((byte) 0x20))
|
||||
&& headerBytes[4] == ((byte) 0x66) && headerBytes[5] == ((byte) 0x74) && headerBytes[6] == ((byte) 0x79) && headerBytes[7] == ((byte) 0x70)) {
|
||||
return MimeType.VIDEO_MP4;
|
||||
} else if (headerBytes[0] == ((byte) 0x1a) && headerBytes[1] == ((byte) 0x45) && headerBytes[2] == ((byte) 0xdf) && headerBytes[3] == ((byte) 0xa3)) {
|
||||
return MimeType.VIDEO_WEBM;
|
||||
if (mimeType == null) {
|
||||
return MimeType.DEFAULT;
|
||||
}
|
||||
|
||||
// Detect by file extension
|
||||
if (name != null) {
|
||||
if (name.endsWith(".txt")) {
|
||||
return MimeType.TEXT_PLAIN;
|
||||
} else if (name.endsWith(".csv")) {
|
||||
return MimeType.TEXT_CSV;
|
||||
}
|
||||
}
|
||||
|
||||
return MimeType.DEFAULT;
|
||||
return mimeType;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -113,52 +67,4 @@ public class MimeTypeUtil {
|
||||
return "bin";
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Guess the MIME type of open document formats (docx and odt).
|
||||
* It's more costly than the simple header check, but needed because open document formats
|
||||
* are simple ZIP files on the outside and much bigger on the inside.
|
||||
*
|
||||
* @param mimeType Currently detected MIME type
|
||||
* @param file File on disk
|
||||
* @return MIME type
|
||||
*/
|
||||
    private static String guessOpenDocumentFormat(String mimeType, Path file) {
        if (!MimeType.APPLICATION_ZIP.equals(mimeType)) {
            // Open document formats are ZIP files
            return mimeType;
        }

        try (InputStream inputStream = Files.newInputStream(file);
             ZipInputStream zipInputStream = new ZipInputStream(inputStream, Charsets.ISO_8859_1)) {
            ZipEntry archiveEntry = zipInputStream.getNextEntry();
            while (archiveEntry != null) {
                if (archiveEntry.getName().equals("mimetype")) {
                    // Maybe it's an ODT file
                    String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
                    if (MimeType.OPEN_DOCUMENT_TEXT.equals(content.trim())) {
                        mimeType = MimeType.OPEN_DOCUMENT_TEXT;
                        break;
                    }
                } else if (archiveEntry.getName().equals("[Content_Types].xml")) {
                    // Maybe it's a DOCX file
                    String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
                    if (content.contains(MimeType.OFFICE_DOCUMENT)) {
                        mimeType = MimeType.OFFICE_DOCUMENT;
                        break;
                    } else if (content.contains(MimeType.OFFICE_PRESENTATION)) {
                        mimeType = MimeType.OFFICE_PRESENTATION;
                        break;
                    }
                }

                archiveEntry = zipInputStream.getNextEntry();
            }
        } catch (Exception e) {
            // In case of any error, just give up and keep the ZIP MIME type
            return mimeType;
        }

        return mimeType;
    }
}

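To make concrete what the ZIP inspection above keys on, here is a rough, hypothetical sketch that writes a minimal ODT-like archive; the temp-file name and the use of java.util.zip are assumptions for illustration, not part of this diff, and the literal MIME string is the standard ODT type that MimeType.OPEN_DOCUMENT_TEXT is assumed to hold.

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

public class OdtDetectionSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical sample: a minimal ODT-like ZIP. Real ODT files store "mimetype"
        // as their first (uncompressed) entry; for the detection above, only the entry
        // name and its content matter.
        Path file = Files.createTempFile("sample", ".odt");
        try (OutputStream os = Files.newOutputStream(file);
             ZipOutputStream zos = new ZipOutputStream(os)) {
            zos.putNextEntry(new ZipEntry("mimetype"));
            zos.write("application/vnd.oasis.opendocument.text".getBytes(StandardCharsets.ISO_8859_1));
            zos.closeEntry();
        }
        // Detection flow: the file starts with "PK", so the header check reports
        // application/zip; guessOpenDocumentFormat() then finds the "mimetype" entry,
        // compares its content and upgrades the result to the ODT type.
        System.out.println("wrote " + file);
    }
}
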
@@ -1 +1 @@
db.version=22
db.version=27

@@ -41,4 +41,4 @@ insert into T_LOCALE(LOC_ID_C) values('fr');
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('admin', 'Admin', NOW());
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('user', 'User', NOW());
insert into T_ROLE_BASE_FUNCTION(RBF_ID_C, RBF_IDROLE_C, RBF_IDBASEFUNCTION_C, RBF_CREATEDATE_D) values('admin_ADMIN', 'admin', 'ADMIN', NOW());
insert into T_USER(USE_ID_C, USE_IDLOCALE_C, USE_IDROLE_C, USE_USERNAME_C, USE_PASSWORD_C, USE_EMAIL_C, USE_THEME_C, USE_FIRSTCONNECTION_B, USE_CREATEDATE_D, USE_PRIVATEKEY_C) values('admin', 'en', 'admin', 'admin', '$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C', 'admin@localhost', 'default.less', true, NOW(), 'AdminPk');
insert into T_USER(USE_ID_C, USE_IDLOCALE_C, USE_IDROLE_C, USE_USERNAME_C, USE_PASSWORD_C, USE_EMAIL_C, USE_THEME_C, USE_FIRSTCONNECTION_B, USE_CREATEDATE_D, USE_PRIVATEKEY_C) values('admin', 'en', 'admin', 'admin', '$2y$10$xg0EEKVUehutDI1m6qQhVeFz7SMQMl1jQzjf2KkVsR2c7aV2vyyjK', 'admin@localhost', 'default.less', true, NOW(), 'AdminPk');

@@ -0,0 +1,2 @@
alter table T_USER add column USE_ONBOARDING_B bit not null default 1;
update T_CONFIG set CFG_VALUE_C = '23' where CFG_ID_C = 'DB_VERSION';

@@ -0,0 +1,5 @@
create cached table T_METADATA ( MET_ID_C varchar(36) not null, MET_NAME_C varchar(50) not null, MET_TYPE_C varchar(20) not null, MET_DELETEDATE_D datetime, primary key (MET_ID_C) );
create cached table T_DOCUMENT_METADATA ( DME_ID_C varchar(36) not null, DME_IDDOCUMENT_C varchar(36) not null, DME_IDMETADATA_C varchar(36) not null, DME_VALUE_C varchar(4000) null, primary key (DME_ID_C) );
alter table T_DOCUMENT_METADATA add constraint FK_DME_IDDOCUMENT_C foreign key (DME_IDDOCUMENT_C) references T_DOCUMENT (DOC_ID_C) on delete restrict on update restrict;
alter table T_DOCUMENT_METADATA add constraint FK_DME_IDMETADATA_C foreign key (DME_IDMETADATA_C) references T_METADATA (MET_ID_C) on delete restrict on update restrict;
update T_CONFIG set CFG_VALUE_C = '24' where CFG_ID_C = 'DB_VERSION';

@@ -0,0 +1,3 @@
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_AUTOMATIC_TAGS', 'false');
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_DELETE_IMPORTED', 'false');
update T_CONFIG set CFG_VALUE_C = '25' where CFG_ID_C = 'DB_VERSION';

@@ -0,0 +1,2 @@
!PGSQL!UPDATE t_file SET fil_content_c = convert_from(loread(lo_open(fil_content_c::int, CAST( x'20000' AS integer)), 999999999), 'UNICODE')::TEXT WHERE fil_content_c IS NOT NULL;
update T_CONFIG set CFG_VALUE_C = '26' where CFG_ID_C = 'DB_VERSION';

@@ -0,0 +1,2 @@
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_FOLDER', 'INBOX');
update T_CONFIG set CFG_VALUE_C = '27' where CFG_ID_C = 'DB_VERSION';

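The update scripts above follow a simple linear-versioning convention: db.version names the target schema version and each script's final statement bumps the DB_VERSION row in T_CONFIG. A rough sketch of the version check this convention implies; the class, method shape and connection handling are assumptions for illustration, not taken from this diff.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;

public class SchemaVersionSketch {
    /** Lists the schema versions still to be applied to reach the target (e.g. 27). */
    public static List<Integer> pendingVersions(Connection connection, int target) throws Exception {
        try (Statement st = connection.createStatement();
             ResultSet rs = st.executeQuery(
                     "select CFG_VALUE_C from T_CONFIG where CFG_ID_C = 'DB_VERSION'")) {
            int current = rs.next() ? Integer.parseInt(rs.getString(1)) : 0;
            List<Integer> pending = new ArrayList<>();
            for (int v = current + 1; v <= target; v++) {
                // The script for version v ends with:
                //   update T_CONFIG set CFG_VALUE_C = '<v>' where CFG_ID_C = 'DB_VERSION';
                // so an interrupted upgrade can resume from the last recorded version.
                pending.add(v);
            }
            return pending;
        }
    }
}
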
@@ -1,10 +0,0 @@
email.template.password_recovery.subject=Bitte setzen Sie ihr Passwort zur\u00FCck
email.template.password_recovery.hello=Hallo {0}.
email.template.password_recovery.instruction1=Wir haben eine Anfrage zum Zur\u00FCcksetzen Ihres Passworts erhalten.<br/>Wenn Sie keine Hilfe angefordert haben, k\u00F6nnen Sie diese E-Mail einfach ignorieren.
email.template.password_recovery.instruction2=Um Ihr Passwort zur\u00FCckzusetzen, besuchen Sie bitte den folgenden Link:
email.template.password_recovery.click_here=Klicken Sie hier, um Ihr Passwort zur\u00FCckzusetzen
email.template.route_step_validate.subject=Ein Dokument braucht Ihre Aufmerksamkeit
email.template.route_step_validate.hello=Hallo {0}.
email.template.route_step_validate.instruction1=Ihnen wurde ein Workflow-Schritt zugewiesen, der Ihre Aufmerksamkeit erfordert.
email.template.route_step_validate.instruction2=Um das Dokument anzuzeigen und den Workflow zu \u00FCberpr\u00FCfen, besuchen Sie bitte den folgenden Link:
email.no_html.error=Ihr E-Mail-Client unterst\u00FCtzt keine HTML-Nachrichten

@@ -1,10 +0,0 @@
email.template.password_recovery.subject=R\u00E9initialiser votre mot de passe
email.template.password_recovery.hello=Bonjour {0}.
email.template.password_recovery.instruction1=Nous avons re\u00E7u une demande de r\u00E9initialisation de mot de passe.<br/>Si vous n'avez rien demand\u00E9, vous pouvez ignorer cet mail.
email.template.password_recovery.instruction2=Pour r\u00E9initialiser votre mot de passe, cliquez sur le lien ci-dessous :
email.template.password_recovery.click_here=Cliquez ici pour r\u00E9initialiser votre mot de passe.
email.template.route_step_validate.subject=Un document n\u00E9cessite votre attention
email.template.route_step_validate.hello=Bonjour {0}.
email.template.route_step_validate.instruction1=Une \u00E9tape de workflow vous a \u00E9t\u00E9 attribu\u00E9e et n\u00E9cessite votre attention.
email.template.route_step_validate.instruction2=Pour voir le document et valider le workflow, veuillez visiter le lien ci-dessous :
email.no_html.error=Votre client mail ne supporte pas les messages HTML

Some files were not shown because too many files have changed in this diff.