Mirror of https://github.com/sismics/docs.git

132 Commits
v1.8 ... v1.11

Author SHA1 Message Date
bgamard
59597e962d 1.11 2023-03-12 13:58:03 +01:00
bgamard
c85a951a9e upgrade base image 2023-03-12 13:52:30 +01:00
bgamard
7f47a17633 upgrade jetty 2023-03-12 13:45:36 +01:00
bgamard
690c961a55 Merge remote-tracking branch 'origin/master' 2023-03-12 13:35:51 +01:00
bgamard
21efd1e4a7 Closes #658 2023-03-12 13:35:35 +01:00
@RandyMcMillan
ad27228429 docker-compose.yml: add example config (#665) 2023-02-20 11:51:39 +01:00
@RandyMcMillan
dd4a1667ca .gitignore: add docs/.gitkeep (#664) 2023-02-20 11:51:30 +01:00
@RandyMcMillan
399d2b7951 minor grammar corrections (#663) 2023-02-19 21:31:30 +01:00
bgamard
d51dfd6636 #647: fix doc 2022-08-26 18:18:06 +02:00
bgamard
ca85c1fa9f #647: always return OK on password lost route 2022-08-26 18:15:49 +02:00
bgamard
5e7f06070e keep filename in temporary file 2022-05-16 19:22:54 +02:00
bgamard
dc0c20cd0c moved tests 2022-05-16 18:53:08 +02:00
bgamard
98aa33341a moved tests 2022-05-16 18:50:19 +02:00
bgamard
1f7c0afc1e Closes #639: rework mime type resolution using java api 2022-05-16 18:44:26 +02:00
bgamard
1ccce3f942 rename 2022-05-05 18:15:24 +02:00
Uli
90d5bc8de7 Allow the . (dot) and @ (at) character in usernames (#637)
Co-authored-by: Uli Koeth <uli@kiot.eu>
2022-05-05 17:48:45 +02:00
bgamard
c6a685d7c0 Closes #620: delete a non-existing document should return 404 2022-04-17 13:35:29 +02:00
bgamard
e6cfd899e5 Closes #632: validate POST /app/config_inbox and update documentation 2022-04-17 13:23:22 +02:00
Julien Kirch
bd23f14792 Add doc for search syntax (#634) 2022-04-17 13:10:01 +02:00
Julien Kirch
46f6b9e537 Download zip of files not in same document (#591) 2022-04-15 10:18:39 +02:00
Julien Kirch
d5832c48e1 Small code cleaning 2022-03-21 11:36:25 +01:00
Julien Kirch
64ec0f63ca Add parameter to return the files when searching for a document (#582) 2022-03-20 11:36:28 +01:00
Ben Grabham
0b7c42e814 Check if environment variables are not empty strings as well as not null (#623) 2022-02-20 15:48:37 +01:00
bgamard
d8dc63fc98 Merge remote-tracking branch 'origin/master' 2022-02-02 21:18:06 +01:00
bgamard
81a7f154c2 logs only for admin 2022-02-02 21:17:58 +01:00
StaryVena
af3263d471 Add OCR support for Czech language (#613)
Co-authored-by: Vaclav Uher <vaclav.uher@bruker.com>
2022-01-26 15:27:14 +01:00
Dan Schaper
bbe5f19997 Tag latest on master, tag version on github tag. (#612)
Signed-off-by: Dan Schaper <dan.schaper@pi-hole.net>
2022-01-25 10:37:47 +01:00
Benjamin Gamard
f33650c099 fix action 2022-01-21 13:51:16 +01:00
Benjamin Gamard
58f81ec851 fix action 2022-01-21 13:37:31 +01:00
Dan Schaper
c9262eb204 Add build tags and labels (#608)
Fixes Docker images always build as 'latest' #607

Signed-off-by: Dan Schaper <dan.schaper@pi-hole.net>
2022-01-21 13:35:39 +01:00
bgamard
3637b832e5 test the new mime type detection 2022-01-17 14:37:22 +01:00
Joost Timmerman
ee56cfe2b4 Support audio mime (#574) 2022-01-17 14:24:50 +01:00
bgamard
721410c7d0 add test dependencies 2022-01-13 00:15:37 +01:00
bgamard
f0310e3933 add test dependencies 2022-01-13 00:06:29 +01:00
bgamard
302d7cccc4 run tests + fix docker username 2022-01-12 23:59:43 +01:00
Dan Schaper
f9977d5ce6 Actions workflow (#601)
Signed-off-by: Dan Schaper <dan@glacialmagma.com>
2022-01-12 23:49:34 +01:00
bgamard
0a927fd320 add application/x-www-form-urlencoded to delete requests 2022-01-02 16:46:20 +01:00
bgamard
523501a592 consumes application/x-www-form-urlencoded 2022-01-02 16:40:01 +01:00
bgamard
ff8155be6a upgrade docker image to use jetty 9.4.36 2022-01-02 16:06:36 +01:00
bgamard
6c5d697051 Merge remote-tracking branch 'origin/master' 2022-01-02 15:39:11 +01:00
bgamard
b19145160e release 1.10 2022-01-02 15:39:00 +01:00
Roland Illig
c7ada71ef5 proofread German translation (#566)
* plural forms
* spelling of composed words
* spaces between numbers and measurement units
* typographic ellipsis (\u2026) instead of three dots
2021-11-20 20:34:36 +01:00
bgamard
4951229576 escape ngTranslate parameters 2021-11-16 20:01:36 +01:00
Julien Kirch
d98c1bddec Add custom parameter for exact search by title 2021-10-12 13:50:32 +02:00
Dan Schaper
b0d0e93364 Remove duplicate tesseact language and alphabetize (#579)
Signed-off-by: Dan Schaper <dan@glacialmagma.com>
2021-09-30 13:23:58 +02:00
Benjamin Gamard
f20a562439 remove form url encoded from baseresource 2021-08-20 10:45:08 +02:00
Hung Nguyen
4ae8475f5e Add Vietnamese language support (#549) 2021-06-21 10:51:31 +02:00
Benjamin Gamard
fd4c627c61 remove travis 2021-05-12 19:38:58 +02:00
Benjamin Gamard
a867d48232 remove travis 2021-05-12 19:38:45 +02:00
Somebodyisnobody
f6bf61fce9 Update de.json (#532)
Fix typo
2021-03-31 19:08:58 +02:00
bgamard
c60c9a8f74 Merge remote-tracking branch 'origin/master' 2021-02-12 21:54:33 +01:00
bgamard
dc021ab71e Closes #520: downgrade H2 to 1.4.199 2021-02-12 21:54:25 +01:00
Pascal Pischel
18b5551f6c Fix german translation 2021-02-12 21:48:57 +01:00
bgamard
6fcd8771a5 upgrade to java 11 + upgrade libraries 2021-01-25 22:40:58 +01:00
bgamard
1fef4c3d2e next dev iteration + cleanup stress project 2021-01-25 21:31:14 +01:00
bgamard
ee6ed2bf0b v1.9 2021-01-25 21:27:22 +01:00
bgamard
57b67fee09 Closes #458: fix css glitch on mobile 2021-01-21 18:14:39 +01:00
bgamard
a6cbacae72 Closes #509: guest users cannot share and unshare 2021-01-21 17:58:36 +01:00
Vec7or
1e0f8e2484 Closes #472: redirect to previous URL after login 2021-01-21 17:44:48 +01:00
bgamard
bcb4c6d7b0 Merge remote-tracking branch 'origin/master' 2021-01-21 17:39:23 +01:00
bgamard
ea1d5907c1 #497: fix npe in unauthenticated cases 2021-01-21 17:39:01 +01:00
Vegard Hoff Walmsness
05bac38fc3 Norwegian language support 2021-01-14 20:20:16 +01:00
Cornelicorn
69746cd369 #486: Fix importer default file filter 2021-01-06 13:51:29 +01:00
Vec7or
ff3db531e5 Configure bcrypt work 2021-01-05 18:59:18 +01:00
bgamard
558de7ba3f README.md 2020-12-31 07:50:04 +01:00
Vec7or
af15116bf9 Upgrade bcrypt library + explain env variables 2020-12-31 07:46:00 +01:00
Benjamin Gamard
36e5a9747b Merge pull request #489 from Vec7or/fix-color-bug
Fix tag-colors inside inherited acl
2020-12-30 23:45:57 +01:00
Vec7or
1d66b47f5f Fix tag-colors inside inherited acl 2020-12-30 17:33:24 +01:00
Evil McJerkface
1346dd3616 Add option to specify a particular IMAP folder (aka "label" in Gmail) (#477) 2020-11-22 13:39:39 +01:00
Evil McJerkface
b6ec5e108b Added support for TLS & STARTTLS for SMTP connections. If port 465 is configured, TLS will be assumed. If port 587 is used, STARTTLS is assumed. (#478)
Closes #353: Added support for TLS & STARTTLS for SMTP connections
2020-11-19 10:15:40 +01:00
bgamard
5b2833350c Closes #391: change english date format to yyyy/mm/dd 2020-10-31 20:20:11 +01:00
bgamard
66acb380ab Closes #451: convert lob content to text for pgsql 2020-10-31 20:14:06 +01:00
bgamard
00c62f2ad4 Closes #467: italian translation 2020-10-31 20:11:55 +01:00
bgamard
7205863d95 Closes #469: make sure the IP sent by the forward proxy is not bigger than 45 chars 2020-10-23 19:31:27 +02:00
Pyrox
2a4274d583 Italian translation (#465) 2020-10-16 13:54:49 +02:00
bgamard
087184b598 Closes #466: remove duplicate translation resources 2020-10-16 13:53:12 +02:00
bgamard
e5600e0be7 add fallback for el language 2020-10-14 22:54:22 +02:00
Benjamin Gamard
964f3128d2 Merge pull request #463 from kazelot/dev-language-pl
Add  polish language
2020-10-14 22:49:57 +02:00
marcin
69905cdc55 Merge with last changes in master branch
Fix polish translation
2020-10-14 22:42:59 +02:00
marcin
bf4e277db7 Add translation to timeago.js
Update translation
2020-10-14 19:07:40 +02:00
bgamard
eaa7cca278 Closes #460: minification error following #455 2020-10-14 18:45:18 +02:00
marcin
0e115bb808 Translate to polish 2020-10-14 11:57:00 +02:00
marcin
1897f5567b Initial commit 2020-10-14 10:49:08 +02:00
bgamard
d647528b3c #455: greek translation of angular-timeago by @gdepountis 2020-10-12 19:36:59 +02:00
bgamard
07d42cdb9c Merge remote-tracking branch 'origin/master' 2020-10-12 10:44:41 +02:00
bgamard
dabb960c94 #455: greek translation by @gdepountis 2020-10-12 10:44:28 +02:00
Benjamin Gamard
c71e794051 Merge pull request #454 from vmario89/patch-3
Update de.json
2020-10-08 12:52:43 +02:00
Mario Voigt
1584c0cbb2 Update de.json
Small lang fix in de.json
2020-10-08 09:52:38 +02:00
bgamard
22f0f1abf4 Closes #453: load gravatar icons in HTTPS 2020-10-06 16:37:52 +02:00
Jean-Marc Tremeaux
205f92d093 Upgrade to JDK 11.0.8 2020-09-24 13:12:45 +02:00
Jean-Marc Tremeaux
7488ac15a7 Merge remote-tracking branch 'origin/master' 2020-09-24 12:45:08 +02:00
bgamard
44f5db993a #451: remove @Lob on file content 2020-09-13 17:58:28 +02:00
bgamard
f76eae23ca Merge remote-tracking branch 'origin/master' 2020-09-10 20:44:20 +02:00
bgamard
5e2a18f819 #451: update the file content with an Hibernate query instead of a native query 2020-09-10 20:42:51 +02:00
Benjamin Gamard
2f6e5d53c2 Merge pull request #449 from muhsinkutay/patch-1
Fix typo in English translation
2020-09-08 02:02:59 +02:00
muhsinkutay
50e6c4d965 Update en.json
Line 6: Misspelling
2020-09-08 02:51:53 +03:00
bgamard
3ad0554a7d use org.apache.directory.api for LDAP instead of apacheds 2020-08-29 19:10:14 +02:00
bgamard
113ec78c67 import less apacheds dependencies 2020-08-28 19:33:58 +02:00
bgamard
f814927eca update README.md 2020-08-28 18:10:28 +02:00
bgamard
a9719feeec LDAP support, courtesy of an anonymous donator 2020-08-28 18:09:54 +02:00
bgamard
6dc4f1b448 #423: fulltext search by default 2020-08-28 17:47:07 +02:00
bgamard
e1fa17691d Merge remote-tracking branch 'origin/master' 2020-08-28 17:34:03 +02:00
bgamard
42e61d6e1f #423: fulltext search by default 2020-08-28 17:33:27 +02:00
Jamie Magee
2bf3e6bd3c Danish language support (#438) 2020-08-02 23:32:29 +02:00
Benjamin Gamard
608b2f868d Doc about prebuilt Docker image for bulk importer 2020-06-24 21:54:09 +02:00
Benjamin Gamard
46638bab5b Build and push docs-importer to Docker Hub 2020-06-24 21:18:37 +02:00
Carl Reid
4607362e46 Add file filter to importer (#426) 2020-06-23 22:31:49 +02:00
Cornelicorn
041b2dfcc1 Fix tag-adding bugs of the importer (#427) 2020-06-21 14:48:13 +02:00
Carl Reid
7ad0dd43e2 Remove COPY node_modules from Dockerfile (#425) 2020-06-21 13:45:23 +02:00
buherman11
35339f7328 Fixed sending workflow emails to previous assignee (#422) 2020-06-10 12:04:09 +02:00
Gabisonfire
e474e7cd75 Added option to copy a file before it is deleted after being imported (#418) 2020-06-07 20:13:04 +02:00
Cornelicorn
612fab2aef Improve the file importer (#415)
Improve the bulk importer (tags by filename, document language, Docker container)
2020-05-22 15:18:19 +02:00
bgamard
3f67bd471b increase default heap space to 1GB 2020-05-19 15:33:23 +02:00
bgamard
cb29dcd6cc handle all content extraction errors 2020-05-19 15:11:05 +02:00
bgamard
d428e89c30 #412: process files outside of a transaction 2020-05-19 14:44:41 +02:00
bgamard
9b2aeb7480 add temporary logs 2020-05-19 14:05:34 +02:00
bgamard
d9ad69c7ff add more log to debug processing indicator 2020-05-17 21:58:13 +02:00
bgamard
16fc058264 Merge remote-tracking branch 'origin/master' 2020-05-17 21:00:30 +02:00
bgamard
520b143165 #412: better handle concurrent updates and async listeners 2020-05-17 21:00:01 +02:00
cadast
95c37a03f8 Improve Inbox Scanning (#407)
closes #386: delete emails after import + closes #405: auto tag documents imported by email
2020-05-14 13:59:11 +02:00
bgamard
0d058b9c9c at least 2 threads for background work 2020-05-07 11:09:11 +02:00
bgamard
7c72b5e69b #401: importer: truncate document title to allowed size 2020-05-06 13:56:45 +02:00
bgamard
3ec254e908 #400: limit async bus to (cpu cores / 2) threads 2020-05-06 11:35:45 +02:00
bgamard
fda13c004e Merge remote-tracking branch 'origin/master' 2020-04-21 21:13:54 +02:00
bgamard
3af85eeea6 bump importer version 2020-04-21 21:13:20 +02:00
lord-lawnmower
c08616e6df Latvian Language Support (#390) 2020-04-13 21:04:47 +02:00
Antti Tapio
7faa0f8a54 Finnish and Swedish language support (#389) 2020-04-12 18:27:06 +02:00
bgamard
26c5fe2e69 next dev iteration 2020-03-26 20:06:58 +01:00
bgamard
6bdaa8352b update README.md 2020-03-26 20:00:44 +01:00
Jean-Marc Tremeaux
94252de73f Merge remote-tracking branch 'origin/master' 2018-10-23 18:16:35 +02:00
Jean-Marc Tremeaux
d43072663e Merge remote-tracking branch 'origin/master' 2017-03-21 09:04:20 +01:00
Jean-Marc Tremeaux
eb3562567d Fix dockerfile 2017-03-21 08:51:58 +01:00
156 changed files with 5208 additions and 1543 deletions

.github/workflows/build-deploy.yml (new file)

@@ -0,0 +1,84 @@
name: Maven CI/CD
on:
  push:
    branches: [master]
    tags: [v*]
  workflow_dispatch:
jobs:
  build_and_publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 11
        uses: actions/setup-java@v2
        with:
          java-version: "11"
          distribution: "temurin"
          cache: maven
      - name: Install test dependencies
        run: sudo apt-get -y -q --no-install-recommends install ffmpeg mediainfo tesseract-ocr tesseract-ocr-deu
      - name: Build with Maven
        run: mvn -Pprod clean install
      - name: Upload war artifact
        uses: actions/upload-artifact@v2
        with:
          name: docs-web-ci.war
          path: docs-web/target/docs*.war
  build_docker_image:
    name: Publish to Docker Hub
    runs-on: ubuntu-latest
    needs: [build_and_publish]
    steps:
      -
        name: Checkout
        uses: actions/checkout@v2
      -
        name: Download war artifact
        uses: actions/download-artifact@v2
        with:
          name: docs-web-ci.war
          path: docs-web/target
      -
        name: Setup up Docker Buildx
        uses: docker/setup-buildx-action@v1
      -
        name: Login to DockerHub
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v1
        with:
          username: ${{ secrets.DOCKERHUB_USERNAME }}
          password: ${{ secrets.DOCKERHUB_TOKEN }}
      -
        name: Populate Docker metadata
        id: metadata
        uses: docker/metadata-action@v3
        with:
          images: sismics/docs
          flavor: |
            latest=false
          tags: |
            type=ref,event=tag
            type=raw,value=latest,enable=${{ github.ref_type != 'tag' }}
          labels: |
            org.opencontainers.image.title = Teedy
            org.opencontainers.image.description = Teedy is an open source, lightweight document management system for individuals and businesses.
            org.opencontainers.image.created = ${{ github.event_created_at }}
            org.opencontainers.image.author = Sismics
            org.opencontainers.image.url = https://teedy.io/
            org.opencontainers.image.vendor = Sismics
            org.opencontainers.image.license = GPLv2
            org.opencontainers.image.version = ${{ github.event_head_commit.id }}
      -
        name: Build and push
        id: docker_build
        uses: docker/build-push-action@v2
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.metadata.outputs.tags }}
          labels: ${{ steps.metadata.outputs.labels }}

.gitignore

@@ -11,6 +11,11 @@
*.iml
node_modules
import_test
docs-importer-linux
docs-importer-macos
docs-importer-win.exe
teedy-importer-linux
teedy-importer-macos
teedy-importer-win.exe
docs/*
!docs/.gitkeep
#macos
.DS_Store

.travis.yml (deleted)

@@ -1,26 +0,0 @@
sudo: required
dist: trusty
language: java
before_install:
  - sudo add-apt-repository -y ppa:mc3man/trusty-media
  - sudo apt-get -qq update
  - sudo apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb tesseract-ocr-hun
  - sudo apt-get -y -q install haveged && sudo service haveged start
after_success:
  - |
    if [ "$TRAVIS_PULL_REQUEST" == "false" ]; then
      mvn -Pprod -DskipTests clean install
      docker login -u $DOCKER_USER -p $DOCKER_PASS
      export REPO=sismics/docs
      export TAG=`if [ "$TRAVIS_BRANCH" == "master" ]; then echo "latest"; else echo $TRAVIS_BRANCH ; fi`
      docker build -f Dockerfile -t $REPO:$COMMIT .
      docker tag $REPO:$COMMIT $REPO:$TAG
      docker tag $REPO:$COMMIT $REPO:travis-$TRAVIS_BUILD_NUMBER
      docker push $REPO
    fi
env:
  global:
    - secure: LRGpjWORb0qy6VuypZjTAfA8uRHlFUMTwb77cenS9PPRBxuSnctC531asS9Xg3DqC5nsRxBBprgfCKotn5S8nBSD1ceHh84NASyzLSBft3xSMbg7f/2i7MQ+pGVwLncusBU6E/drnMFwZBleo+9M8Tf96axY5zuUp90MUTpSgt0=
    - secure: bCDDR6+I7PmSkuTYZv1HF/z98ANX/SFEESUCqxVmV5Gs0zFC0vQXaPJQ2xaJNRop1HZBFMZLeMMPleb0iOs985smpvK2F6Rbop9Tu+Vyo0uKqv9tbZ7F8Nfgnv9suHKZlL84FNeUQZJX6vsFIYPEJ/r7K5P/M0PdUy++fEwxEhU=
    - secure: ewXnzbkgCIHpDWtaWGMa1OYZJ/ki99zcIl4jcDPIC0eB3njX/WgfcC6i0Ke9mLqDqwXarWJ6helm22sNh+xtQiz6isfBtBX+novfRt9AANrBe3koCMUemMDy7oh5VflBaFNP0DVb8LSCnwf6dx6ZB5E9EB8knvk40quc/cXpGjY=
    - COMMIT=${TRAVIS_COMMIT::8}

Dockerfile

@@ -1,7 +1,37 @@
FROM sismics/ubuntu-jetty:9.4.12
MAINTAINER b.gamard@sismics.com
FROM sismics/ubuntu-jetty:9.4.51
LABEL maintainer="b.gamard@sismics.com"
RUN apt-get update && apt-get -y -q install ffmpeg mediainfo tesseract-ocr tesseract-ocr-fra tesseract-ocr-ita tesseract-ocr-kor tesseract-ocr-rus tesseract-ocr-ukr tesseract-ocr-spa tesseract-ocr-ara tesseract-ocr-hin tesseract-ocr-deu tesseract-ocr-pol tesseract-ocr-jpn tesseract-ocr-por tesseract-ocr-tha tesseract-ocr-jpn tesseract-ocr-chi-sim tesseract-ocr-chi-tra tesseract-ocr-nld tesseract-ocr-tur tesseract-ocr-heb tesseract-ocr-hun && \
RUN apt-get update && \
apt-get -y -q --no-install-recommends install \
ffmpeg \
mediainfo \
tesseract-ocr \
tesseract-ocr-ara \
tesseract-ocr-ces \
tesseract-ocr-chi-sim \
tesseract-ocr-chi-tra \
tesseract-ocr-dan \
tesseract-ocr-deu \
tesseract-ocr-fin \
tesseract-ocr-fra \
tesseract-ocr-heb \
tesseract-ocr-hin \
tesseract-ocr-hun \
tesseract-ocr-ita \
tesseract-ocr-jpn \
tesseract-ocr-kor \
tesseract-ocr-lav \
tesseract-ocr-nld \
tesseract-ocr-nor \
tesseract-ocr-pol \
tesseract-ocr-por \
tesseract-ocr-rus \
tesseract-ocr-spa \
tesseract-ocr-swe \
tesseract-ocr-tha \
tesseract-ocr-tur \
tesseract-ocr-ukr \
tesseract-ocr-vie && \
apt-get clean && rm -rf /var/lib/apt/lists/*
# Remove the embedded javax.mail jar from Jetty
@@ -9,3 +39,5 @@ RUN rm -f /opt/jetty/lib/mail/javax.mail.glassfish-*.jar
ADD docs.xml /opt/jetty/webapps/docs.xml
ADD docs-web/target/docs-web-*.war /opt/jetty/webapps/docs.war
ENV JAVA_OPTIONS -Xmx1g

README.md

@@ -2,9 +2,7 @@
<img src="https://teedy.io/img/github-title.png" alt="Teedy" width=500 />
</h3>
[![Twitter: @teedyio](https://img.shields.io/badge/contact-@teedyio-blue.svg?style=flat)](https://twitter.com/teedyio)
[![License: GPL v2](https://img.shields.io/badge/License-GPL%20v2-blue.svg)](https://www.gnu.org/licenses/old-licenses/gpl-2.0.en.html)
[![Build Status](https://secure.travis-ci.org/sismics/docs.png)](http://travis-ci.org/sismics/docs)
Teedy is an open source, lightweight document management system for individuals and businesses.
@@ -16,19 +14,19 @@ Teedy is an open source, lightweight document management system for individuals
![New!](https://teedy.io/img/laptop-demo.png?20180301)
Demo
----
# Demo
A demo is available at [demo.teedy.io](https://demo.teedy.io)
- Guest login is enabled with read access on all documents
- "admin" login with "admin" password
- "demo" login with "password" password
Features
--------
# Features
- Responsive user interface
- Optical character recognition
- LDAP authentication ![New!](https://www.sismics.com/public/img/new.png)
- Support for image, PDF, ODT, DOCX, PPTX files
- Video file support
- Flexible search engine with suggestions and highlighting
@@ -54,76 +52,191 @@ Features
- [Bulk files importer](https://github.com/sismics/docs/tree/master/docs-importer) (single or scan mode)
- Tested to one million documents
Install with Docker
-------------------
# Install with Docker
A preconfigured Docker image is available, including OCR and media conversion tools, listening on port 8080. The database is an embedded H2 database, but PostgreSQL is also supported for better performance.
**The default admin password is "admin". Don't forget to change it before going to production.**
- Master branch, can be unstable. Not recommended for production use: `sismics/docs:latest`
- Latest stable version: `sismics/docs:v1.7`
- Latest stable version: `sismics/docs:v1.11`
The data directory is `/data`. Don't forget to mount a volume on it.
To build external URLs, the server expects a `DOCS_BASE_URL` environment variable (for example https://teedy.mycompany.com).
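For example, a minimal sketch of running the image with plain Docker (the host data path and base URL below are placeholders to adapt):

```console
docker run -d --name teedy \
  -p 8080:8080 \
  -v /path/on/host/teedy-data:/data \
  -e DOCS_BASE_URL="https://teedy.mycompany.com" \
  sismics/docs:v1.11
```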
Manual installation
-------------------
## Available environment variables
#### Requirements
- Java 8 with the [Java Cryptography Extension](http://www.oracle.com/technetwork/java/javase/downloads/jce-7-download-432124.html)
- Tesseract 3 or 4 for OCR
- General
- `DOCS_BASE_URL`: The base URL used by the application. Generated URLs will use this as their base.
- `DOCS_GLOBAL_QUOTA`: Defines the default quota applied to all users.
- `DOCS_BCRYPT_WORK`: Defines the work factor used for password hashing. The default is `10`. The value may range from `4` to `31`, both inclusive. The specified value will be used for all new users and for users changing their password. Be aware that setting this factor too high can heavily impact login and user creation performance.
- Admin
- `DOCS_ADMIN_EMAIL_INIT`: Defines the e-mail address the admin user should have upon initialization.
- `DOCS_ADMIN_PASSWORD_INIT`: Defines the password the admin user should have upon initialization. Needs to be a bcrypt hash (one way to generate such a hash is shown after this list). **Be aware that any `$` within the hash has to be escaped with a second `$`.**
- Database
- `DATABASE_URL`: The JDBC connection string to be used by Hibernate.
- `DATABASE_USER`: The user which should be used for the database connection.
- `DATABASE_PASSWORD`: The password to be used for the database connection.
- Language
- `DOCS_DEFAULT_LANGUAGE`: The language which will be used as the default. Currently supported values are:
- `eng`, `fra`, `ita`, `deu`, `spa`, `por`, `pol`, `rus`, `ukr`, `ara`, `hin`, `chi_sim`, `chi_tra`, `jpn`, `tha`, `kor`, `nld`, `tur`, `heb`, `hun`, `fin`, `swe`, `lav`, `dan`
- E-Mail
- `DOCS_SMTP_HOSTNAME`: Hostname of the SMTP server to be used by Teedy.
- `DOCS_SMTP_PORT`: The port which should be used.
- `DOCS_SMTP_USERNAME`: The username to be used.
- `DOCS_SMTP_PASSWORD`: The password to be used.
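As mentioned for `DOCS_ADMIN_PASSWORD_INIT`, the value must be a bcrypt hash with every `$` doubled. A hedged sketch of generating such a value, assuming the `htpasswd` tool from apache2-utils is available:

```console
# Hash the password "superSecure" with bcrypt (work factor 10),
# strip the leading ":" and the newline, then double each "$" for docker-compose.
htpasswd -bnBC 10 "" superSecure | tr -d ':\n' | sed 's/\$/\$\$/g'
```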
## Examples
In the following examples some passwords are shown in cleartext to keep the examples simple. We strongly encourage you to use variables with an `.env` file or other means to store your passwords securely.
### Using the internal database
```yaml
version: '3'
services:
  # Teedy Application
  teedy-server:
    image: sismics/docs:v1.11
    restart: unless-stopped
    ports:
      # Map internal port to host
      - 8080:8080
    environment:
      # Base url to be used
      DOCS_BASE_URL: "https://docs.example.com"
      # Set the admin email
      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
      # Set the admin password (in this example: "superSecure")
      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
    volumes:
      - ./docs/data:/data
```
### Using PostgreSQL
```yaml
version: '3'
services:
  # Teedy Application
  teedy-server:
    image: sismics/docs:v1.11
    restart: unless-stopped
    ports:
      # Map internal port to host
      - 8080:8080
    environment:
      # Base url to be used
      DOCS_BASE_URL: "https://docs.example.com"
      # Set the admin email
      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
      # Set the admin password (in this example: "superSecure")
      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
      # Setup the database connection. "teedy-db" is the hostname
      # and "teedy" is the name of the database the application
      # will connect to.
      DATABASE_URL: "jdbc:postgresql://teedy-db:5432/teedy"
      DATABASE_USER: "teedy_db_user"
      DATABASE_PASSWORD: "teedy_db_password"
    volumes:
      - ./docs/data:/data
    networks:
      - docker-internal
      - internet
    depends_on:
      - teedy-db

  # DB for Teedy
  teedy-db:
    image: postgres:13.1-alpine
    restart: unless-stopped
    expose:
      - 5432
    environment:
      POSTGRES_USER: "teedy_db_user"
      POSTGRES_PASSWORD: "teedy_db_password"
      POSTGRES_DB: "teedy"
    volumes:
      - ./docs/db:/var/lib/postgresql/data
    networks:
      - docker-internal

networks:
  # Network without internet access. The db does not need
  # access to the host network.
  docker-internal:
    driver: bridge
    internal: true
  internet:
    driver: bridge
```
# Manual installation
## Requirements
- Java 11
- Tesseract 4 for OCR
- ffmpeg for video thumbnails
- mediainfo for video metadata extraction
- A webapp server like [Jetty](http://eclipse.org/jetty/) or [Tomcat](http://tomcat.apache.org/)
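If you install the prerequisites yourself on a Debian/Ubuntu host, a hedged sketch (package names are assumptions for Debian-style repositories; add the `tesseract-ocr-*` language packs you need, as in the Dockerfile above):

```console
sudo apt-get update
sudo apt-get install -y openjdk-11-jre ffmpeg mediainfo \
  tesseract-ocr tesseract-ocr-eng tesseract-ocr-deu
```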
#### Download
## Download
The latest release can be downloaded in WAR format here: <https://github.com/sismics/docs/releases>.
**The default admin password is "admin". Don't forget to change it before going to production.**
How to build Teedy from the sources
----------------------------------
## How to build Teedy from the sources
Prerequisites: JDK 8 with JCE, Maven 3, NPM, Grunt, Tesseract 3 or 4
Prerequisites: JDK 11, Maven 3, NPM, Grunt, Tesseract 4
Teedy is organized in several Maven modules:
- docs-core
- docs-web
- docs-web-common
- docs-core
- docs-web
- docs-web-common
First off, clone the repository: `git clone git://github.com/sismics/docs.git`
or download the sources from GitHub.
#### Launch the build
### Launch the build
From the root directory:
mvn clean -DskipTests install
```console
mvn clean -DskipTests install
```
#### Run a stand-alone version
### Run a stand-alone version
From the `docs-web` directory:
mvn jetty:run
```console
mvn jetty:run
```
#### Build a .war to deploy to your servlet container
### Build a .war to deploy to your servlet container
From the `docs-web` directory:
mvn -Pprod -DskipTests clean install
```console
mvn -Pprod -DskipTests clean install
```
You will get your deployable WAR in the `docs-web/target` directory.
Contributing
------------
# Contributing
All contributions are more than welcome. Contributions may close an issue, fix a bug (reported or not), improve the existing code, add a new feature, and so on.
The `master` branch is the default and base branch for the project. It is used for development, and all Pull Requests should go there.
License
-------
# License
Teedy is released under the terms of the GPL license. See `COPYING` for more
information or see <http://opensource.org/licenses/GPL-2.0>.

docker-compose.yml (new file)

@@ -0,0 +1,18 @@
version: '3'
services:
  # Teedy Application
  teedy-server:
    image: sismics/docs:v1.10
    restart: unless-stopped
    ports:
      # Map internal port to host
      - 8080:8080
    environment:
      # Base url to be used
      DOCS_BASE_URL: "https://docs.example.com"
      # Set the admin email
      DOCS_ADMIN_EMAIL_INIT: "admin@example.com"
      # Set the admin password (in this example: "superSecure")
      DOCS_ADMIN_PASSWORD_INIT: "$$2a$$05$$PcMNUbJvsk7QHFSfEIDaIOjk1VI9/E7IPjTKx.jkjPxkx2EOKSoPS"
    volumes:
      - ./docs/data:/data

LanguageAdapter.java (docs-android)

@@ -34,6 +34,7 @@ public class LanguageAdapter extends BaseAdapter {
languageList.add(new Language("fra", R.string.language_french, R.drawable.fra));
languageList.add(new Language("eng", R.string.language_english, R.drawable.eng));
languageList.add(new Language("deu", R.string.language_german, R.drawable.deu));
languageList.add(new Language("pol", R.string.language_polish, R.drawable.pol));
}
@Override

SearchQueryBuilder.java (docs-android)

@@ -39,7 +39,9 @@ public class SearchQueryBuilder {
*/
public SearchQueryBuilder simpleSearch(String simpleSearch) {
if (isValid(simpleSearch)) {
query.append(SEARCH_SEPARATOR).append(simpleSearch);
query.append(SEARCH_SEPARATOR)
.append("simple:")
.append(simpleSearch);
}
return this;
}

Binary file not shown (new image, 238 B).

strings.xml (docs-android, Polish translation, new file)

@@ -0,0 +1,164 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<!-- Validation -->
<string name="validate_error_email">Nieprawidłowy email</string>
<string name="validate_error_length_min">Za krótki (min. %d)</string>
<string name="validate_error_length_max">Za długi (max. %d)</string>
<string name="validate_error_required">Wymagany</string>
<string name="validate_error_alphanumeric">Tylko litery i cyfry</string>
<!-- App -->
<string name="app_name" translatable="false">Teedy</string>
<string name="drawer_open">Otwórz szufladę nawigacji</string>
<string name="drawer_close">Zamknij szufladę nawigacji</string>
<string name="login_explain"><![CDATA[Aby rozpocząć, musisz pobrać i zainstalować serwer Teedy na <a href="https://github.com/sismics/docs">github.com/sismics/docs</a> i poniżej wprowadzić adres]]></string>
<string name="server">Serwer</string>
<string name="username">Użytkownik</string>
<string name="password">Hasło</string>
<string name="login">Zaloguj</string>
<string name="ok">OK</string>
<string name="cancel">Anuluj</string>
<string name="login_fail_title">Błąd logowania</string>
<string name="login_fail">Nieprawidłowa nazwa użytkownika lub hasło</string>
<string name="network_error_title">Błąd sieci</string>
<string name="network_error">Błąd sieci, sprawdź połączenie z interneterm oraz adres URL serwera</string>
<string name="invalid_url_title">Nieprawidłowy adres URL</string>
<string name="invalid_url">Sprawdź adres URL serwera i spróbuj ponownie</string>
<string name="crash_toast_text">Wystąpiła awaria, wysłano raport w celu rozwiązania tego problemu</string>
<string name="created_date">Data utworzenia</string>
<string name="download_file">Pobierz bieżący plik</string>
<string name="download_document">Pobierz</string>
<string name="action_search">Znadź dokumenty</string>
<string name="all_documents">Wszystkie dokumenty</string>
<string name="shared_documents">Udostępnione dokumenty</string>
<string name="all_tags">Wszystkie etykiety</string>
<string name="no_tags">Brak etykiet</string>
<string name="error_loading_tags">Błąd ładowania etykiet</string>
<string name="no_documents">Brak dokumentów</string>
<string name="error_loading_documents">Błąd ładowania dokumentów</string>
<string name="no_files">Brak plików</string>
<string name="error_loading_files">Błąd ładowania plików</string>
<string name="new_document">Nowy dokument</string>
<string name="share">Udostępnij</string>
<string name="close">Zamknij</string>
<string name="add">Dodaj</string>
<string name="add_share_hint">Nazwa udostępnienia (opcjonalnie)</string>
<string name="document_not_shared">Ten dokument nie jest obecnie udostępniony</string>
<string name="delete_share">Usuń udostępnienie</string>
<string name="send_share">Wyślij link udostępnienia</string>
<string name="error_loading_shares">Błąd ładowania udostępnień</string>
<string name="error_adding_share">Błąd dodawania udostępnienia</string>
<string name="share_default_name">Udostępnij link</string>
<string name="error_deleting_share">Błąd usuwania udostępnienia</string>
<string name="send_share_to">Wyślij link udostępnienia do</string>
<string name="upload_file">dodaj plik</string>
<string name="upload_from">Przeslij plik z</string>
<string name="settings">ustawienia</string>
<string name="logout">Wyloguj</string>
<string name="version">Wersja</string>
<string name="build">Kompilacja</string>
<string name="pref_advanced_category">Ustawienia zaawansowane</string>
<string name="pref_about_category">O programie</string>
<string name="pref_github">GitHub</string>
<string name="pref_issue">Zgłoś błąd</string>
<string name="pref_clear_cache_title">Wyczyść cache</string>
<string name="pref_clear_cache_summary">Wyczyść podręczne pliki</string>
<string name="pref_clear_cache_success">Cache wyczyszczony</string>
<string name="pref_clear_history_title">Wyczyść historię wyszukiwania</string>
<string name="pref_clear_history_summary">Opróżnij ostatnie sugestie wyszukiwania</string>
<string name="pref_clear_history_success">Historia wyszukiwania wyczyszczona</string>
<string name="pref_cache_size">Rozmiar cache</string>
<string name="language_french" translatable="false">Francuski</string>
<string name="language_english" translatable="false">Angielski</string>
<string name="language_german" translatable="false">Niemiecki</string>
<string name="language_polish" translatable="false">Polski</string>
<string name="save">Zapisz</string>
<string name="edit_document">Edytuj</string>
<string name="error_editing_document">Błąd sieci, spróbuj ponownie</string>
<string name="please_wait">Proszę czekać</string>
<string name="document_editing_message">Wysyłam twoje dane</string>
<string name="delete_document">Usuń</string>
<string name="delete_document_title">Usuń dokument</string>
<string name="delete_document_message">Naprawdę chcesz usunąć dokument i powiązane z nim pliki?</string>
<string name="document_delete_failure">Błąd sieci w czasie usuwania tego dokumentu</string>
<string name="document_deleting_message">Usuwanie dokumentu</string>
<string name="delete_file_title">Usuń plik</string>
<string name="delete_file_message">Naprawdę chcesz usunąć ten plik?</string>
<string name="file_delete_failure">Błąd sieci w czasie usuwania bieżącego pliku</string>
<string name="file_deleting_message">Usuwanie pliku</string>
<string name="error_reading_file">Błąd podczas odczytu pliku</string>
<string name="upload_notification_title">Teedy</string>
<string name="upload_notification_message">Przesyłanie nowego pliku do dokumentu</string>
<string name="upload_notification_error">Błąd przsyłania nowego pliku</string>
<string name="delete_file">Usuń bieżący plik</string>
<string name="advanced_search">Zaawansowane wyszukiwanie</string>
<string name="search">Znajdź</string>
<string name="add_tags">Dodaj eytkiety</string>
<string name="creation_date">Data utworzenia</string>
<string name="description">Opis</string>
<string name="title">Tytuł</string>
<string name="simple_search">Proste wyszukiwanie</string>
<string name="fulltext_search">Wyszukiwanie pełnotekstowe</string>
<string name="creator">Autor</string>
<string name="after_date">Po dacie</string>
<string name="before_date">Przed datą</string>
<string name="search_tags">Znajdź etykiety</string>
<string name="all_languages">Wszystkie języki</string>
<string name="toggle_informations">Przełącz informacje</string>
<string name="who_can_access">Kto ma dostęp</string>
<string name="comments">Komentarze</string>
<string name="no_comments">Brak komentarzy</string>
<string name="error_loading_comments">Błąd ładowania komentarzy</string>
<string name="send">Wyślij</string>
<string name="add_comment">Dodaj komentarz</string>
<string name="comment_add_failure">Błąd dodawania komentarza</string>
<string name="adding_comment">Dodawanie komentarza</string>
<string name="comment_delete">Usuń komentarz</string>
<string name="deleting_comment">Usuwanie komentarza</string>
<string name="error_deleting_comment">Błąd usuwania komentarza</string>
<string name="export_pdf">PDF</string>
<string name="download">Pobierz</string>
<string name="margin">Margines</string>
<string name="fit_image_to_page">Dostosuj obraz do strony</string>
<string name="export_comments">Eksport komentarzy</string>
<string name="export_metadata">Eksport metadanych</string>
<string name="mm">mm</string>
<string name="download_file_title">Eksport plików Teedy</string>
<string name="download_document_title">Eksport dokumentu Teedy</string>
<string name="download_pdf_title">Eksport Teedy jako PDF</string>
<string name="latest_activity">Ostatnie aktywności</string>
<string name="activity">Aktywności</string>
<string name="email">E-mail</string>
<string name="storage_quota">Limit magazynu</string>
<string name="storage_display">%1$d/%2$d MB</string>
<string name="validation_code">Kod weryfikujący</string>
<string name="shared">Udostępnienie</string>
<string name="language">Język</string>
<string name="coverage">Zakres</string>
<string name="type">Rodzaj</string>
<string name="source">Źródło</string>
<string name="format">Format</string>
<string name="publisher">Udostępniający</string>
<string name="identifier">Identifikator</string>
<string name="subject">temat</string>
<string name="rights">Prawa</string>
<string name="contributors">Współtwórcy</string>
<string name="relations">Powiązania</string>
<!-- Audit log -->
<string name="auditlog_Acl">ACL</string>
<string name="auditlog_Comment">Komentarz</string>
<string name="auditlog_Document">Dokument</string>
<string name="auditlog_File">Plik</string>
<string name="auditlog_Group">Grupa</string>
<string name="auditlog_Route">Przepływ</string>
<string name="auditlog_RouteModel">Model przepływu</string>
<string name="auditlog_Tag">Etykieta</string>
<string name="auditlog_User">Użytkownik</string>
<string name="auditlog_Webhook">Webhook</string>
<string name="auditlog_created">utworzony</string>
<string name="auditlog_updated">zaktualizowany</string>
<string name="auditlog_deleted">usunięty</string>
</resources>

strings.xml (docs-android, default resources)

@@ -72,6 +72,7 @@
<string name="language_french" translatable="false">Français</string>
<string name="language_english" translatable="false">English</string>
<string name="language_german" translatable="false">Deutsch</string>
<string name="language_polish" translatable="false">Polski</string>
<string name="save">Save</string>
<string name="edit_document">Edit</string>
<string name="error_editing_document">Network error, please try again</string>

pom.xml (docs-core)

@@ -5,7 +5,7 @@
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.8</version>
<version>1.11</version>
<relativePath>..</relativePath>
</parent>
@@ -91,10 +91,10 @@
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</dependency>
<dependency>
<groupId>org.mindrot</groupId>
<artifactId>jbcrypt</artifactId>
<groupId>at.favre.lib</groupId>
<artifactId>bcrypt</artifactId>
</dependency>
<dependency>
@@ -131,7 +131,12 @@
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>
</dependency>
<dependency>
<groupId>org.apache.directory.api</groupId>
<artifactId>api-all</artifactId>
</dependency>
<!-- Only there to read old index and rebuild them -->
<dependency>
<groupId>org.apache.lucene</groupId>

ConfigType.java

@@ -42,5 +42,21 @@ public enum ConfigType {
INBOX_PORT,
INBOX_USERNAME,
INBOX_PASSWORD,
INBOX_TAG
INBOX_FOLDER,
INBOX_TAG,
INBOX_AUTOMATIC_TAGS,
INBOX_DELETE_IMPORTED,
/**
* LDAP connection.
*/
LDAP_ENABLED,
LDAP_HOST,
LDAP_PORT,
LDAP_ADMIN_DN,
LDAP_ADMIN_PASSWORD,
LDAP_BASE_DN,
LDAP_FILTER,
LDAP_DEFAULT_EMAIL,
LDAP_DEFAULT_STORAGE
}

Constants.java

@@ -18,13 +18,18 @@ public class Constants {
/**
* Administrator's default password ("admin").
*/
public static final String DEFAULT_ADMIN_PASSWORD = "$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C";
public static final String DEFAULT_ADMIN_PASSWORD = "$2y$10$xg0EEKVUehutDI1m6qQhVeFz7SMQMl1jQzjf2KkVsR2c7aV2vyyjK";
/**
* Administrator's default email.
*/
public static final String DEFAULT_ADMIN_EMAIL = "admin@localhost";
/**
* Bcrypt default work factor
*/
public static final int DEFAULT_BCRYPT_WORK = 10;
/**
* Guest user ID.
*/
@@ -38,7 +43,7 @@ public class Constants {
/**
* Supported document languages.
*/
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun");
public static final List<String> SUPPORTED_LANGUAGES = Lists.newArrayList("eng", "fra", "ita", "deu", "spa", "por", "pol", "rus", "ukr", "ara", "hin", "chi_sim", "chi_tra", "jpn", "tha", "kor", "nld", "tur", "heb", "hun", "fin", "swe", "lav", "dan", "nor", "vie", "ces");
/**
* Base URL environment variable.
@@ -73,6 +78,11 @@ public class Constants {
*/
public static final String ADMIN_EMAIL_INIT_ENV = "DOCS_ADMIN_EMAIL_INIT";
/**
* Work factor to be used by Bcrypt
*/
public static final String BCRYPT_WORK_ENV = "DOCS_BCRYPT_WORK";
/**
* Expiration time of the password recovery in hours.
*/

AclDao.java

@@ -128,6 +128,9 @@ public class AclDao {
if (SecurityUtil.skipAclCheck(targetIdList)) {
return true;
}
if (targetIdList.isEmpty()) {
return false;
}
EntityManager em = ThreadLocalContext.get().getEntityManager();
StringBuilder sb = new StringBuilder("select a.ACL_ID_C from T_ACL a ");

DocumentDao.java

@@ -10,6 +10,7 @@ import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
@@ -50,10 +51,9 @@ public class DocumentDao {
* @param limit Limit
* @return List of documents
*/
@SuppressWarnings("unchecked")
public List<Document> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.deleteDate is null");
TypedQuery<Document> q = em.createQuery("select d from Document d where d.deleteDate is null", Document.class);
q.setFirstResult(offset);
q.setMaxResults(limit);
return q.getResultList();
@@ -65,10 +65,9 @@ public class DocumentDao {
* @param userId User ID
* @return List of documents
*/
@SuppressWarnings("unchecked")
public List<Document> findByUserId(String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null");
TypedQuery<Document> q = em.createQuery("select d from Document d where d.userId = :userId and d.deleteDate is null", Document.class);
q.setParameter("userId", userId);
return q.getResultList();
}
@@ -138,16 +137,16 @@ public class DocumentDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
q.setParameter("id", id);
Document documentDb = (Document) q.getSingleResult();
TypedQuery<Document> dq = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
dq.setParameter("id", id);
Document documentDb = dq.getSingleResult();
// Delete the document
Date dateNow = new Date();
documentDb.setDeleteDate(dateNow);
// Delete linked data
q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
Query q = em.createQuery("update File f set f.deleteDate = :dateNow where f.documentId = :documentId and f.deleteDate is null");
q.setParameter("documentId", id);
q.setParameter("dateNow", dateNow);
q.executeUpdate();
@@ -179,10 +178,10 @@ public class DocumentDao {
*/
public Document getById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
q.setParameter("id", id);
try {
return (Document) q.getSingleResult();
return q.getSingleResult();
} catch (NoResultException e) {
return null;
}
@@ -199,9 +198,9 @@ public class DocumentDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the document
Query q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null");
TypedQuery<Document> q = em.createQuery("select d from Document d where d.id = :id and d.deleteDate is null", Document.class);
q.setParameter("id", document.getId());
Document documentDb = (Document) q.getSingleResult();
Document documentDb = q.getSingleResult();
// Update the document
documentDb.setTitle(document.getTitle());
@@ -237,7 +236,6 @@ public class DocumentDao {
query.setParameter("fileId", document.getFileId());
query.setParameter("id", document.getId());
query.executeUpdate();
}
/**

FileDao.java

@@ -7,7 +7,8 @@ import com.sismics.util.context.ThreadLocalContext;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.UUID;
@@ -47,10 +48,9 @@ public class FileDao {
* @param limit Limit
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> findAll(int offset, int limit) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.deleteDate is null");
TypedQuery<File> q = em.createQuery("select f from File f where f.deleteDate is null", File.class);
q.setFirstResult(offset);
q.setMaxResults(limit);
return q.getResultList();
@@ -62,28 +62,38 @@ public class FileDao {
* @param userId User ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> findByUserId(String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null");
TypedQuery<File> q = em.createQuery("select f from File f where f.userId = :userId and f.deleteDate is null", File.class);
q.setParameter("userId", userId);
return q.getResultList();
}
/**
* Returns a list of active files.
*
* @param ids Files IDs
* @return List of files
*/
public List<File> getFiles(List<String> ids) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.id in :ids and f.deleteDate is null", File.class);
q.setParameter("ids", ids);
return q.getResultList();
}
/**
* Returns an active file.
* Returns an active file or null.
*
* @param id File ID
* @return Document
* @return File
*/
public File getFile(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
q.setParameter("id", id);
try {
return (File) q.getSingleResult();
} catch (NoResultException e) {
List<File> files = getFiles(List.of(id));
if (files.isEmpty()) {
return null;
} else {
return files.get(0);
}
}
@@ -92,15 +102,15 @@ public class FileDao {
*
* @param id File ID
* @param userId User ID
* @return Document
* @return File
*/
public File getFile(String id, String userId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null");
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.userId = :userId and f.deleteDate is null", File.class);
q.setParameter("id", id);
q.setParameter("userId", userId);
try {
return (File) q.getSingleResult();
return q.getSingleResult();
} catch (NoResultException e) {
return null;
}
@@ -116,9 +126,9 @@ public class FileDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
q.setParameter("id", id);
File fileDb = (File) q.getSingleResult();
File fileDb = q.getSingleResult();
// Delete the file
Date dateNow = new Date();
@@ -138,9 +148,9 @@ public class FileDao {
EntityManager em = ThreadLocalContext.get().getEntityManager();
// Get the file
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
q.setParameter("id", file.getId());
File fileDb = (File) q.getSingleResult();
File fileDb = q.getSingleResult();
// Update the file
fileDb.setDocumentId(file.getDocumentId());
@@ -153,7 +163,7 @@ public class FileDao {
return file;
}
/**
* Gets a file by its ID.
*
@@ -162,32 +172,43 @@ public class FileDao {
*/
public File getActiveById(String id) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null");
TypedQuery<File> q = em.createQuery("select f from File f where f.id = :id and f.deleteDate is null", File.class);
q.setParameter("id", id);
try {
return (File) q.getSingleResult();
return q.getSingleResult();
} catch (NoResultException e) {
return null;
}
}
/**
* Get files by document ID or all orphan files of an user.
* Get files by document ID or all orphan files of a user.
*
* @param userId User ID
* @param documentId Document ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> getByDocumentId(String userId, String documentId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
if (documentId == null) {
Query q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc");
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId is null and f.deleteDate is null and f.latestVersion = true and f.userId = :userId order by f.createDate asc", File.class);
q.setParameter("userId", userId);
return q.getResultList();
} else {
return getByDocumentsIds(Collections.singleton(documentId));
}
Query q = em.createQuery("select f from File f where f.documentId = :documentId and f.latestVersion = true and f.deleteDate is null order by f.order asc");
q.setParameter("documentId", documentId);
}
/**
* Get files by documents IDs.
*
* @param documentIds Documents IDs
* @return List of files
*/
public List<File> getByDocumentsIds(Iterable<String> documentIds) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
TypedQuery<File> q = em.createQuery("select f from File f where f.documentId in :documentIds and f.latestVersion = true and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("documentIds", documentIds);
return q.getResultList();
}
@@ -197,10 +218,9 @@ public class FileDao {
* @param versionId Version ID
* @return List of files
*/
@SuppressWarnings("unchecked")
public List<File> getByVersionId(String versionId) {
EntityManager em = ThreadLocalContext.get().getEntityManager();
Query q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc");
TypedQuery<File> q = em.createQuery("select f from File f where f.versionId = :versionId and f.deleteDate is null order by f.order asc", File.class);
q.setParameter("versionId", versionId);
return q.getResultList();
}

GroupDao.java

@@ -183,12 +183,10 @@ public class GroupDao {
}
criteriaList.add("g.GRP_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@SuppressWarnings("unchecked")

MetadataDao.java

@@ -123,10 +123,8 @@ public class MetadataDao {
criteriaList.add("m.MET_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

RouteDao.java

@@ -64,10 +64,8 @@ public class RouteDao {
}
criteriaList.add("r.RTE_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

RouteModelDao.java

@@ -145,10 +145,8 @@ public class RouteModelDao {
criteriaList.add("rm.RTM_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

RouteStepDao.java

@@ -90,10 +90,8 @@ public class RouteStepDao {
}
criteriaList.add("rs.RTP_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

ShareDao.java

@@ -19,7 +19,6 @@ public class ShareDao {
*
* @param share Share
* @return New ID
* @throws Exception
*/
public String create(Share share) {
// Create the UUID

TagDao.java

@@ -199,10 +199,8 @@ public class TagDao {
criteriaList.add("t.TAG_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);

UserDao.java

@@ -1,7 +1,14 @@
package com.sismics.docs.core.dao;
import com.google.common.base.Joiner;
import com.google.common.base.Strings;
import at.favre.lib.crypto.bcrypt.BCrypt;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.sismics.docs.core.constant.AuditLogType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.criteria.UserCriteria;
import com.sismics.docs.core.dao.dto.UserDto;
import com.sismics.docs.core.model.jpa.User;
@@ -11,8 +18,6 @@ import com.sismics.docs.core.util.jpa.QueryParam;
import com.sismics.docs.core.util.jpa.QueryUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.context.ThreadLocalContext;
import org.joda.time.DateTime;
import org.mindrot.jbcrypt.BCrypt;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
@@ -26,6 +31,11 @@ import java.util.*;
* @author jtremeaux
*/
public class UserDao {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(UserDao.class);
/**
* Authenticates an user.
*
@@ -39,7 +49,8 @@ public class UserDao {
q.setParameter("username", username);
try {
User user = (User) q.getSingleResult();
if (!BCrypt.checkpw(password, user.getPassword()) || user.getDisableDate() != null) {
BCrypt.Result result = BCrypt.verifyer().verify(password.toCharArray(), user.getPassword());
if (!result.verified || user.getDisableDate() != null) {
return null;
}
return user;
@@ -277,7 +288,21 @@ public class UserDao {
* @return Hashed password
*/
private String hashPassword(String password) {
return BCrypt.hashpw(password, BCrypt.gensalt());
int bcryptWork = Constants.DEFAULT_BCRYPT_WORK;
String envBcryptWork = System.getenv(Constants.BCRYPT_WORK_ENV);
if (!Strings.isNullOrEmpty(envBcryptWork)) {
try {
int envBcryptWorkInt = Integer.parseInt(envBcryptWork);
if (envBcryptWorkInt >= 4 && envBcryptWorkInt <= 31) {
bcryptWork = envBcryptWorkInt;
} else {
log.warn(Constants.BCRYPT_WORK_ENV + " needs to be in range 4...31. Falling back to " + Constants.DEFAULT_BCRYPT_WORK + ".");
}
} catch (NumberFormatException e) {
log.warn(Constants.BCRYPT_WORK_ENV + " needs to be a number in range 4...31. Falling back to " + Constants.DEFAULT_BCRYPT_WORK + ".");
}
}
return BCrypt.withDefaults().hashToString(bcryptWork, password.toCharArray());
}
/**

VocabularyDao.java

@@ -20,7 +20,6 @@ public class VocabularyDao {
*
* @param vocabulary Vocabulary
* @return New ID
* @throws Exception
*/
public String create(Vocabulary vocabulary) {
// Create the UUID

WebhookDao.java

@@ -42,11 +42,9 @@ public class WebhookDao {
}
criteriaList.add("w.WHK_DELETEDATE_D is null");
if (!criteriaList.isEmpty()) {
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
}
sb.append(" where ");
sb.append(Joiner.on(" and ").join(criteriaList));
// Perform the search
QueryParam queryParam = QueryUtil.getSortedQueryParam(new QueryParam(sb.toString(), parameterMap), sortCriteria);
@SuppressWarnings("unchecked")

DocumentCriteria.java

@@ -1,5 +1,6 @@
package com.sismics.docs.core.dao.criteria;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
@@ -49,13 +50,13 @@ public class DocumentCriteria {
* Tag IDs.
* The first level list will be AND'ed and the second level list will be OR'ed.
*/
private List<List<String>> tagIdList;
private List<List<String>> tagIdList = new ArrayList<>();
/**
* Tag IDs to excluded.
* Tag IDs to exclude.
* The first and second level list will be excluded.
*/
private List<List<String>> excludedTagIdList;
private List<List<String>> excludedTagIdList = new ArrayList<>();
/**
* Shared status.
@@ -81,7 +82,12 @@ public class DocumentCriteria {
* MIME type of a file.
*/
private String mimeType;
/**
* The title.
*/
private String title;
public List<String> getTargetIdList() {
return targetIdList;
}
@@ -126,19 +132,10 @@ public class DocumentCriteria {
return tagIdList;
}
public void setTagIdList(List<List<String>> tagIdList) {
this.tagIdList = tagIdList;
}
public List<List<String>> getExcludedTagIdList() {
return excludedTagIdList;
}
public DocumentCriteria setExcludedTagIdList(List<List<String>> excludedTagIdList) {
this.excludedTagIdList = excludedTagIdList;
return this;
}
public Boolean getShared() {
return shared;
}
@@ -162,11 +159,7 @@ public class DocumentCriteria {
public void setCreatorId(String creatorId) {
this.creatorId = creatorId;
}
public Boolean getActiveRoute() {
return activeRoute;
}
public Date getUpdateDateMin() {
return updateDateMin;
}
@@ -183,6 +176,10 @@ public class DocumentCriteria {
this.updateDateMax = updateDateMax;
}
public Boolean getActiveRoute() {
return activeRoute;
}
public void setActiveRoute(Boolean activeRoute) {
this.activeRoute = activeRoute;
}
@@ -194,4 +191,12 @@ public class DocumentCriteria {
public void setMimeType(String mimeType) {
this.mimeType = mimeType;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
}

View File

@@ -1,7 +1,6 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.Document;
/**
* Document created event.
@@ -10,32 +9,22 @@ import com.sismics.docs.core.model.jpa.Document;
*/
public class DocumentCreatedAsyncEvent extends UserEvent {
/**
* Created document.
* Document ID.
*/
private Document document;
/**
* Getter of document.
*
* @return the document
*/
public Document getDocument() {
return document;
private String documentId;
public String getDocumentId() {
return documentId;
}
/**
* Setter of document.
*
* @param document document
*/
public void setDocument(Document document) {
this.document = document;
public void setDocumentId(String documentId) {
this.documentId = documentId;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("document", document)
.toString();
.add("documentId", documentId)
.toString();
}
}

View File

@@ -1,7 +1,6 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.File;
/**
* File deleted event.
@@ -10,22 +9,22 @@ import com.sismics.docs.core.model.jpa.File;
*/
public class FileDeletedAsyncEvent extends UserEvent {
/**
* Deleted file.
* File ID.
*/
private File file;
public File getFile() {
return file;
private String fileId;
public String getFileId() {
return fileId;
}
public void setFile(File file) {
this.file = file;
public void setFileId(String fileId) {
this.fileId = fileId;
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("file", file)
.add("fileId", fileId)
.toString();
}
}

View File

@@ -1,7 +1,6 @@
package com.sismics.docs.core.event;
import com.google.common.base.MoreObjects;
import com.sismics.docs.core.model.jpa.File;
import java.nio.file.Path;
@@ -12,9 +11,9 @@ import java.nio.file.Path;
*/
public abstract class FileEvent extends UserEvent {
/**
* Created file.
* File ID.
*/
private File file;
private String fileId;
/**
* Language of the file.
@@ -25,15 +24,15 @@ public abstract class FileEvent extends UserEvent {
* Unencrypted original file.
*/
private Path unencryptedFile;
public File getFile() {
return file;
public String getFileId() {
return fileId;
}
public void setFile(File file) {
this.file = file;
public void setFileId(String fileId) {
this.fileId = fileId;
}
public String getLanguage() {
return language;
}
@@ -54,7 +53,7 @@ public abstract class FileEvent extends UserEvent {
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("file", file)
.add("fileId", fileId)
.add("language", language)
.toString();
}

View File

@@ -3,9 +3,11 @@ package com.sismics.docs.core.listener.async;
import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.dao.ContributorDao;
import com.sismics.docs.core.dao.DocumentDao;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.Contributor;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -34,15 +36,22 @@ public class DocumentCreatedAsyncListener {
}
TransactionUtil.handle(() -> {
// Fetch a fresh document
Document document = new DocumentDao().getById(event.getDocumentId());
if (document == null) {
// The document has been deleted since
return;
}
// Add the first contributor (the creator of the document)
ContributorDao contributorDao = new ContributorDao();
Contributor contributor = new Contributor();
contributor.setDocumentId(event.getDocument().getId());
contributor.setDocumentId(event.getDocumentId());
contributor.setUserId(event.getUserId());
contributorDao.create(contributor);
// Update index
AppContext.getInstance().getIndexingHandler().createDocument(event.getDocument());
AppContext.getInstance().getIndexingHandler().createDocument(document);
});
}
}

View File

@@ -4,7 +4,6 @@ import com.google.common.eventbus.AllowConcurrentEvents;
import com.google.common.eventbus.Subscribe;
import com.sismics.docs.core.event.FileDeletedAsyncEvent;
import com.sismics.docs.core.model.context.AppContext;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import org.slf4j.Logger;
@@ -35,12 +34,11 @@ public class FileDeletedAsyncListener {
}
// Delete the file from storage
File file = event.getFile();
FileUtil.delete(file);
FileUtil.delete(event.getFileId());
TransactionUtil.handle(() -> {
// Update index
AppContext.getInstance().getIndexingHandler().deleteDocument(file.getId());
AppContext.getInstance().getIndexingHandler().deleteDocument(event.getFileId());
});
}
}

View File

@@ -28,6 +28,7 @@ import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.text.MessageFormat;
import java.util.concurrent.atomic.AtomicReference;
/**
* Listener on file processing.
@@ -52,15 +53,7 @@ public class FileProcessingAsyncListener {
log.info("File created event: " + event.toString());
}
TransactionUtil.handle(() -> {
// Generate thumbnail, extract content
processFile(event);
// Update index
AppContext.getInstance().getIndexingHandler().createFile(event.getFile());
});
FileUtil.endProcessingFile(event.getFile().getId());
processFile(event, true);
}
/**
@@ -71,43 +64,84 @@ public class FileProcessingAsyncListener {
@Subscribe
@AllowConcurrentEvents
public void on(final FileUpdatedAsyncEvent event) {
if (log.isInfoEnabled()) {
log.info("File updated event: " + event.toString());
}
log.info("File updated event: " + event.toString());
TransactionUtil.handle(() -> {
// Generate thumbnail, extract content
processFile(event);
// Update index
AppContext.getInstance().getIndexingHandler().updateFile(event.getFile());
});
FileUtil.endProcessingFile(event.getFile().getId());
processFile(event, false);
}
/**
* Process the file (create/update).
* Process a file:
* Generate thumbnails
* Extract and save text content
*
* @param event File event
* @param isFileCreated True if the file was just created
*/
private void processFile(FileEvent event) {
private void processFile(FileEvent event, boolean isFileCreated) {
AtomicReference<File> file = new AtomicReference<>();
AtomicReference<User> user = new AtomicReference<>();
// Open a first transaction to get what we need to start the processing
TransactionUtil.handle(() -> {
// Generate thumbnail, extract content
file.set(new FileDao().getActiveById(event.getFileId()));
if (file.get() == null) {
// The file has been deleted since
return;
}
// Get the creating user from the database for its private key
UserDao userDao = new UserDao();
user.set(userDao.getById(file.get().getUserId()));
});
// Process the file outside of a transaction
if (user.get() == null || file.get() == null) {
// The user or file has been deleted
FileUtil.endProcessingFile(event.getFileId());
return;
}
String content = extractContent(event, user.get(), file.get());
// Open a new transaction to save the file content
TransactionUtil.handle(() -> {
// Save the file to database
FileDao fileDao = new FileDao();
File freshFile = fileDao.getActiveById(event.getFileId());
if (freshFile == null) {
// The file has been deleted since the text extraction started, ignore the result
return;
}
freshFile.setContent(content);
fileDao.update(freshFile);
// Update index with the updated file
if (isFileCreated) {
AppContext.getInstance().getIndexingHandler().createFile(freshFile);
} else {
AppContext.getInstance().getIndexingHandler().updateFile(freshFile);
}
});
FileUtil.endProcessingFile(event.getFileId());
}
/**
* Extract text content from a file.
* This is executed outside of a transaction.
*
* @param event File event
* @param user User who created the file
* @param file Fresh file
* @return Text content
*/
private String extractContent(FileEvent event, User user, File file) {
// Find a format handler
final File file = event.getFile();
FormatHandler formatHandler = FormatHandlerUtil.find(file.getMimeType());
if (formatHandler == null) {
log.info("Format unhandled: " + file.getMimeType());
FileUtil.endProcessingFile(file.getId());
return;
}
// Get the creating user from the database for its private key
UserDao userDao = new UserDao();
User user = userDao.getById(file.getUserId());
if (user == null) {
// The user has been deleted meanwhile
FileUtil.endProcessingFile(file.getId());
return;
return null;
}
// Generate file variations
@@ -132,28 +166,21 @@ public class FileProcessingAsyncListener {
ImageUtil.writeJpeg(thumbnail, outputStream);
}
}
} catch (Exception e) {
log.error("Unable to generate thumbnails", e);
} catch (Throwable e) {
log.error("Unable to generate thumbnails for: " + file, e);
}
// Extract text content from the file
long startTime = System.currentTimeMillis();
String content = null;
log.info("Start extracting content from: " + file);
try {
content = formatHandler.extractContent(event.getLanguage(), event.getUnencryptedFile());
} catch (Exception e) {
log.error("Error extracting content from: " + event.getFile(), e);
} catch (Throwable e) {
log.error("Error extracting content from: " + file, e);
}
log.info(MessageFormat.format("File content extracted in {0}ms", System.currentTimeMillis() - startTime));
log.info(MessageFormat.format("File content extracted in {0}ms: " + file.getId(), System.currentTimeMillis() - startTime));
// Save the file to database
FileDao fileDao = new FileDao();
if (fileDao.getActiveById(file.getId()) == null) {
// The file has been deleted since the text extraction started, ignore the result
return;
}
file.setContent(content);
fileDao.update(file);
return content;
}
}

View File

@@ -36,7 +36,7 @@ public class WebhookAsyncListener {
@Subscribe
@AllowConcurrentEvents
public void on(final DocumentCreatedAsyncEvent event) {
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocument().getId());
triggerWebhook(WebhookEvent.DOCUMENT_CREATED, event.getDocumentId());
}
@Subscribe
@@ -54,19 +54,19 @@ public class WebhookAsyncListener {
@Subscribe
@AllowConcurrentEvents
public void on(final FileCreatedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFile().getId());
triggerWebhook(WebhookEvent.FILE_CREATED, event.getFileId());
}
@Subscribe
@AllowConcurrentEvents
public void on(final FileUpdatedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFile().getId());
triggerWebhook(WebhookEvent.FILE_UPDATED, event.getFileId());
}
@Subscribe
@AllowConcurrentEvents
public void on(final FileDeletedAsyncEvent event) {
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFile().getId());
triggerWebhook(WebhookEvent.FILE_DELETED, event.getFileId());
}
/**
@@ -86,7 +86,7 @@ public class WebhookAsyncListener {
}
});
RequestBody body = RequestBody.create(JSON, "{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}");
RequestBody body = RequestBody.create("{\"event\": \"" + event.name() + "\", \"id\": \"" + id + "\"}", JSON);
for (String webhookUrl : webhookUrlList) {
Request request = new Request.Builder()

View File

@@ -1,11 +1,11 @@
package com.sismics.docs.core.model.context;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.event.RebuildIndexAsyncEvent;
import com.sismics.docs.core.listener.async.*;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.service.FileService;
@@ -81,7 +81,7 @@ public class AppContext {
List<Class<? extends IndexingHandler>> indexingHandlerList = Lists.newArrayList(
new ClasspathScanner<IndexingHandler>().findClasses(IndexingHandler.class, "com.sismics.docs.core.util.indexing"));
for (Class<? extends IndexingHandler> handlerClass : indexingHandlerList) {
IndexingHandler handler = handlerClass.newInstance();
IndexingHandler handler = handlerClass.getDeclaredConstructor().newInstance();
if (handler.accept()) {
indexingHandler = handler;
break;
@@ -107,7 +107,7 @@ public class AppContext {
// Change the admin password if needed
String envAdminPassword = System.getenv(Constants.ADMIN_PASSWORD_INIT_ENV);
if (envAdminPassword != null) {
if (!Strings.isNullOrEmpty(envAdminPassword)) {
UserDao userDao = new UserDao();
User adminUser = userDao.getById("admin");
if (Constants.DEFAULT_ADMIN_PASSWORD.equals(adminUser.getPassword())) {
@@ -118,7 +118,7 @@ public class AppContext {
// Change the admin email if needed
String envAdminEmail = System.getenv(Constants.ADMIN_EMAIL_INIT_ENV);
if (envAdminEmail != null) {
if (!Strings.isNullOrEmpty(envAdminEmail)) {
UserDao userDao = new UserDao();
User adminUser = userDao.getById("admin");
if (Constants.DEFAULT_ADMIN_EMAIL.equals(adminUser.getEmail())) {
@@ -172,7 +172,8 @@ public class AppContext {
if (EnvironmentUtil.isUnitTest()) {
return new EventBus();
} else {
ThreadPoolExecutor executor = new ThreadPoolExecutor(8, 8,
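// Size the async executor from the number of available processors (at least 2 threads) instead of a fixed pool of 8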
int threadCount = Math.max(Runtime.getRuntime().availableProcessors() / 2, 2);
ThreadPoolExecutor executor = new ThreadPoolExecutor(threadCount, threadCount,
1L, TimeUnit.MINUTES,
new LinkedBlockingQueue<>());
asyncExecutorList.add(executor);

View File

@@ -49,7 +49,6 @@ public class File implements Loggable {
/**
* OCR-ized content.
*/
@Lob
@Column(name = "FIL_CONTENT_C")
private String content;

View File

@@ -69,13 +69,18 @@ public class FileService extends AbstractScheduledService {
return Scheduler.newFixedDelaySchedule(0, 5, TimeUnit.SECONDS);
}
public Path createTemporaryFile() throws IOException {
return createTemporaryFile(null);
}
/**
* Create a temporary file.
*
* @param name Wanted file name
* @return New temporary file
*/
public Path createTemporaryFile() throws IOException {
Path path = Files.createTempFile("sismics_docs", null);
public Path createTemporaryFile(String name) throws IOException {
Path path = Files.createTempFile("sismics_docs", name);
referenceSet.add(new TemporaryPathReference(path, referenceQueue));
return path;
}

View File

@@ -1,16 +1,21 @@
package com.sismics.docs.core.service;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.AbstractScheduledService;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.dao.TagDao;
import com.sismics.docs.core.dao.criteria.TagCriteria;
import com.sismics.docs.core.dao.dto.TagDto;
import com.sismics.docs.core.event.DocumentCreatedAsyncEvent;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.Document;
import com.sismics.docs.core.model.jpa.Tag;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.util.DocumentUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.docs.core.util.TransactionUtil;
import com.sismics.docs.core.util.jpa.SortCriteria;
import com.sismics.util.EmailUtil;
import com.sismics.util.context.ThreadLocalContext;
import org.apache.commons.lang.StringUtils;
@@ -19,9 +24,10 @@ import org.slf4j.LoggerFactory;
import javax.mail.*;
import javax.mail.search.FlagTerm;
import java.util.Date;
import java.util.Properties;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Inbox scanning service.
@@ -79,22 +85,25 @@ public class InboxService extends AbstractScheduledService {
lastSyncDate = new Date();
lastSyncMessageCount = 0;
try {
Map<String, String> tagsNameToId = getAllTags();
inbox = openInbox();
Message[] messages = inbox.search(new FlagTerm(new Flags(Flags.Flag.SEEN), false));
log.info(messages.length + " messages found");
for (Message message : messages) {
importMessage(message);
importMessage(message, tagsNameToId);
lastSyncMessageCount++;
}
} catch (FolderClosedException e) {
// Ignore this, we will just continue importing on the next cycle
} catch (Exception e) {
log.error("Error synching the inbox", e);
log.error("Error syncing the inbox", e);
lastSyncError = e.getMessage();
} finally {
try {
if (inbox != null) {
inbox.close(false);
// The parameter controls whether messages flagged for deletion should actually be deleted.
inbox.close(ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED));
inbox.getStore().close();
}
} catch (Exception e) {
@@ -172,7 +181,7 @@ public class InboxService extends AbstractScheduledService {
store.connect(ConfigUtil.getConfigStringValue(ConfigType.INBOX_USERNAME),
ConfigUtil.getConfigStringValue(ConfigType.INBOX_PASSWORD));
Folder inbox = store.getFolder("INBOX");
Folder inbox = store.getFolder(ConfigUtil.getConfigStringValue(ConfigType.INBOX_FOLDER));
inbox.open(Folder.READ_WRITE);
return inbox;
}
@@ -183,7 +192,7 @@ public class InboxService extends AbstractScheduledService {
* @param message Message
* @throws Exception e
*/
private void importMessage(Message message) throws Exception {
private void importMessage(Message message, Map<String, String> tags) throws Exception {
log.info("Importing message: " + message.getSubject());
// Parse the mail
@@ -194,12 +203,27 @@ public class InboxService extends AbstractScheduledService {
// Create the document
Document document = new Document();
document.setUserId("admin");
if (mailContent.getSubject() == null) {
document.setTitle("Imported email from EML file");
} else {
document.setTitle(StringUtils.abbreviate(mailContent.getSubject(), 100));
String subject = mailContent.getSubject();
if (subject == null) {
subject = "Imported email from EML file";
}
HashSet<String> tagsFound = new HashSet<>();
if (tags != null) {
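// Look for #tagname tokens in the subject, map them to existing tag IDs and strip them from the title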
Pattern pattern = Pattern.compile("#([^\\s:#]+)");
Matcher matcher = pattern.matcher(subject);
while (matcher.find()) {
if (tags.containsKey(matcher.group(1)) && tags.get(matcher.group(1)) != null) {
tagsFound.add(tags.get(matcher.group(1)));
subject = subject.replaceFirst("#" + matcher.group(1), "");
}
}
log.debug("Tags found: " + String.join(", ", tagsFound));
subject = subject.trim().replaceAll(" +", " ");
}
document.setUserId("admin");
document.setTitle(StringUtils.abbreviate(subject, 100));
document.setDescription(StringUtils.abbreviate(mailContent.getMessage(), 4000));
document.setSubject(StringUtils.abbreviate(mailContent.getSubject(), 500));
document.setFormat("EML");
@@ -220,14 +244,19 @@ public class InboxService extends AbstractScheduledService {
TagDao tagDao = new TagDao();
Tag tag = tagDao.getById(tagId);
if (tag != null) {
tagDao.updateTagList(document.getId(), Sets.newHashSet(tagId));
tagsFound.add(tagId);
}
}
// Update tags
if (!tagsFound.isEmpty()) {
new TagDao().updateTagList(document.getId(), tagsFound);
}
// Raise a document created event
DocumentCreatedAsyncEvent documentCreatedAsyncEvent = new DocumentCreatedAsyncEvent();
documentCreatedAsyncEvent.setUserId("admin");
documentCreatedAsyncEvent.setDocument(document);
documentCreatedAsyncEvent.setDocumentId(document.getId());
ThreadLocalContext.get().addAsyncEvent(documentCreatedAsyncEvent);
// Add files to the document
@@ -235,6 +264,29 @@ public class InboxService extends AbstractScheduledService {
FileUtil.createFile(fileContent.getName(), null, fileContent.getFile(), fileContent.getSize(),
document.getLanguage(), "admin", document.getId());
}
if (ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED)) {
message.setFlag(Flags.Flag.DELETED, true);
}
}
/**
* Fetches a HashMap with all tag names as keys and their respective ids as values.
*
* @return Map with all tags or null if not enabled
*/
private Map<String, String> getAllTags() {
if (!ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS)) {
return null;
}
TagDao tagDao = new TagDao();
List<TagDto> tags = tagDao.findByCriteria(new TagCriteria().setTargetIdList(null), new SortCriteria(1, true));
Map<String, String> tagsNameToId = new HashMap<>();
for (TagDto tagDto : tags) {
tagsNameToId.put(tagDto.getName(), tagDto.getId());
}
return tagsNameToId;
}
public Date getLastSyncDate() {

View File

@@ -50,6 +50,19 @@ public class ConfigUtil {
return Integer.parseInt(value);
}
/**
* Returns the long value of a configuration parameter.
*
* @param configType Type of the configuration parameter
* @return Long value of the configuration parameter
* @throws IllegalStateException Configuration parameter undefined
*/
public static long getConfigLongValue(ConfigType configType) {
String value = getConfigStringValue(configType);
return Long.parseLong(value);
}
/**
* Returns the boolean value of a configuration parameter.
*

View File

@@ -1,6 +1,5 @@
package com.sismics.docs.core.util;
import com.google.common.base.Charsets;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
import com.google.common.io.CharStreams;
@@ -18,6 +17,8 @@ import com.sismics.util.context.ThreadLocalContext;
import com.sismics.util.io.InputStreamReaderThread;
import com.sismics.util.mime.MimeTypeUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.crypto.Cipher;
import javax.crypto.CipherInputStream;
@@ -26,6 +27,7 @@ import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
@@ -36,6 +38,11 @@ import java.util.*;
* @author bgamard
*/
public class FileUtil {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(FileUtil.class);
/**
* File ID of files currently being processed.
*/
@@ -69,19 +76,19 @@ public class FileUtil {
// Consume the data as text
try (InputStream is = process.getInputStream()) {
return CharStreams.toString(new InputStreamReader(is, Charsets.UTF_8));
return CharStreams.toString(new InputStreamReader(is, StandardCharsets.UTF_8));
}
}
/**
* Remove a file from the storage filesystem.
*
* @param file File to delete
* @param fileId ID of file to delete
*/
public static void delete(File file) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(file.getId());
Path webFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(file.getId() + "_thumb");
public static void delete(String fileId) throws IOException {
Path storedFile = DirectoryUtil.getStorageDirectory().resolve(fileId);
Path webFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_web");
Path thumbnailFile = DirectoryUtil.getStorageDirectory().resolve(fileId + "_thumb");
if (Files.exists(storedFile)) {
Files.delete(storedFile);
@@ -126,7 +133,7 @@ public class FileUtil {
// Validate global quota
String globalStorageQuotaStr = System.getenv(Constants.GLOBAL_QUOTA_ENV);
if (!Strings.isNullOrEmpty(globalStorageQuotaStr)) {
long globalStorageQuota = Long.valueOf(globalStorageQuotaStr);
long globalStorageQuota = Long.parseLong(globalStorageQuotaStr);
long globalStorageCurrent = userDao.getGlobalStorageCurrent();
if (globalStorageCurrent + fileSize > globalStorageQuota) {
throw new IOException("QuotaReached");
@@ -190,7 +197,7 @@ public class FileUtil {
FileCreatedAsyncEvent fileCreatedAsyncEvent = new FileCreatedAsyncEvent();
fileCreatedAsyncEvent.setUserId(userId);
fileCreatedAsyncEvent.setLanguage(language);
fileCreatedAsyncEvent.setFile(file);
fileCreatedAsyncEvent.setFileId(file.getId());
fileCreatedAsyncEvent.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(fileCreatedAsyncEvent);
@@ -211,6 +218,7 @@ public class FileUtil {
*/
public static void startProcessingFile(String fileId) {
processingFileSet.add(fileId);
log.info("Processing started for file: " + fileId);
}
/**
@@ -220,6 +228,7 @@ public class FileUtil {
*/
public static void endProcessingFile(String fileId) {
processingFileSet.remove(fileId);
log.info("Processing ended for file: " + fileId);
}
/**

View File

@@ -1,8 +1,8 @@
package com.sismics.docs.core.util;
import com.google.common.collect.Lists;
import com.sismics.docs.core.dao.dto.TagDto;
import java.util.ArrayList;
import java.util.List;
/**
@@ -12,14 +12,14 @@ import java.util.List;
*/
public class TagUtil {
/**
* Recursively find children of a tags.
* Recursively find children of a tag.
*
* @param parentTagDto Parent tag
* @param allTagDtoList List of all tags
* @return Children tags
*/
public static List<TagDto> findChildren(TagDto parentTagDto, List<TagDto> allTagDtoList) {
List<TagDto> childrenTagDtoList = Lists.newArrayList();
List<TagDto> childrenTagDtoList = new ArrayList<>();
for (TagDto tagDto : allTagDtoList) {
if (parentTagDto.getId().equals(tagDto.getParentId())) {
@@ -32,15 +32,15 @@ public class TagUtil {
}
/**
* Find tags by name (start with).
* Find tags by name (start with, ignore case).
*
* @param name Name
* @param allTagDtoList List of all tags
* @return List of filtered tags
*/
public static List<TagDto> findByName(String name, List<TagDto> allTagDtoList) {
List<TagDto> tagDtoList = Lists.newArrayList();
if (name == null || name.isEmpty()) {
List<TagDto> tagDtoList = new ArrayList<>();
if (name.isEmpty()) {
return tagDtoList;
}
name = name.toLowerCase();

View File

@@ -48,7 +48,7 @@ public class ProcessFilesAction implements Action {
FileUpdatedAsyncEvent event = new FileUpdatedAsyncEvent();
event.setUserId("admin");
event.setLanguage(documentDto.getLanguage());
event.setFile(file);
event.setFileId(file.getId());
event.setUnencryptedFile(unencryptedFile);
ThreadLocalContext.get().addAsyncEvent(event);
}

View File

@@ -20,7 +20,7 @@ public class AuthenticationUtil {
.map(clazz -> {
try {
return clazz.newInstance();
return clazz.getDeclaredConstructor().newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}

View File

@@ -0,0 +1,127 @@
package com.sismics.docs.core.util.authentication;
import com.sismics.docs.core.constant.ConfigType;
import com.sismics.docs.core.constant.Constants;
import com.sismics.docs.core.dao.ConfigDao;
import com.sismics.docs.core.dao.UserDao;
import com.sismics.docs.core.model.jpa.Config;
import com.sismics.docs.core.model.jpa.User;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.util.ClasspathScanner;
import org.apache.directory.api.ldap.model.cursor.EntryCursor;
import org.apache.directory.api.ldap.model.entry.Attribute;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.entry.Value;
import org.apache.directory.api.ldap.model.message.SearchScope;
import org.apache.directory.ldap.client.api.DefaultLdapConnectionFactory;
import org.apache.directory.ldap.client.api.LdapConnectionConfig;
import org.apache.directory.ldap.client.api.LdapConnectionPool;
import org.apache.directory.ldap.client.api.ValidatingPoolableLdapConnectionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.UUID;
/**
* LDAP authentication handler.
*
* @author bgamard
*/
@ClasspathScanner.Priority(50) // Before the internal database
public class LdapAuthenticationHandler implements AuthenticationHandler {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(LdapAuthenticationHandler.class);
/**
* LDAP connection pool.
*/
private static LdapConnectionPool pool;
/**
* Reset the LDAP pool.
*/
public static void reset() {
if (pool != null) {
try {
pool.close();
} catch (Exception e) {
// NOP
}
}
pool = null;
}
/**
* Initialize the LDAP pool.
*/
private static void init() {
ConfigDao configDao = new ConfigDao();
Config ldapEnabled = configDao.getById(ConfigType.LDAP_ENABLED);
if (pool != null || ldapEnabled == null || !Boolean.parseBoolean(ldapEnabled.getValue())) {
return;
}
LdapConnectionConfig config = new LdapConnectionConfig();
config.setLdapHost(ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST));
config.setLdapPort(ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT));
config.setName(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN));
config.setCredentials(ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD));
DefaultLdapConnectionFactory factory = new DefaultLdapConnectionFactory(config);
pool = new LdapConnectionPool(new ValidatingPoolableLdapConnectionFactory(factory), null);
}
@Override
public User authenticate(String username, String password) {
init();
if (pool == null) {
return null;
}
// Fetch and authenticate the user
Entry userEntry;
try {
EntryCursor cursor = pool.getConnection().search(ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN),
ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER).replace("USERNAME", username), SearchScope.SUBTREE);
if (cursor.next()) {
userEntry = cursor.get();
pool.getConnection().bind(userEntry.getDn(), password);
} else {
// User not found
return null;
}
} catch (Exception e) {
log.error("Error authenticating \"" + username + "\" using the LDAP", e);
return null;
}
UserDao userDao = new UserDao();
User user = userDao.getActiveByUsername(username);
if (user == null) {
// The user is valid but never authenticated, create the user now
log.info("\"" + username + "\" authenticated for the first time, creating the internal user");
user = new User();
user.setRoleId(Constants.DEFAULT_USER_ROLE);
user.setUsername(username);
user.setPassword(UUID.randomUUID().toString()); // No authentication using the internal database
Attribute mailAttribute = userEntry.get("mail");
if (mailAttribute == null || mailAttribute.get() == null) {
user.setEmail(ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL));
} else {
Value value = mailAttribute.get();
user.setEmail(value.getString());
}
user.setStorageQuota(ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE));
try {
userDao.create(user, "admin");
} catch (Exception e) {
log.error("Error while creating the internal user", e);
return null;
}
}
return user;
}
}

View File

@@ -3,6 +3,7 @@ package com.sismics.docs.core.util.format;
import com.google.common.collect.Lists;
import com.sismics.util.ClasspathScanner;
import java.lang.reflect.InvocationTargetException;
import java.util.List;
/**
@@ -26,12 +27,12 @@ public class FormatHandlerUtil {
public static FormatHandler find(String mimeType) {
try {
for (Class<? extends FormatHandler> formatHandlerClass : FORMAT_HANDLERS) {
FormatHandler formatHandler = formatHandlerClass.newInstance();
FormatHandler formatHandler = formatHandlerClass.getDeclaredConstructor().newInstance();
if (formatHandler.accept(mimeType)) {
return formatHandler;
}
}
} catch (InstantiationException | IllegalAccessException e) {
} catch (Exception e) {
return null;
}

View File

@@ -9,7 +9,7 @@ import org.apache.pdfbox.pdmodel.PDPageContentStream;
import org.apache.pdfbox.pdmodel.common.PDRectangle;
import org.apache.pdfbox.pdmodel.graphics.image.LosslessFactory;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.apache.poi.xslf.extractor.XSLFPowerPointExtractor;
import org.apache.poi.sl.extractor.SlideShowExtractor;
import org.apache.poi.xslf.usermodel.XMLSlideShow;
import org.apache.poi.xslf.usermodel.XSLFSlide;
@@ -50,7 +50,7 @@ public class PptxFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
XMLSlideShow pptx = loadPPtxFile(file);
return new XSLFPowerPointExtractor(pptx).getText();
return new SlideShowExtractor<>(pptx).getText();
}
@Override

View File

@@ -11,6 +11,7 @@ import org.apache.pdfbox.pdmodel.PDDocument;
import java.awt.image.BufferedImage;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -33,7 +34,7 @@ public class TextPlainFormatHandler implements FormatHandler {
PdfWriter.getInstance(output, pdfOutputStream);
output.open();
String content = new String(Files.readAllBytes(file), Charsets.UTF_8);
String content = Files.readString(file, StandardCharsets.UTF_8);
Font font = FontFactory.getFont("LiberationMono-Regular");
Paragraph paragraph = new Paragraph(content, font);
paragraph.setAlignment(Element.ALIGN_LEFT);
@@ -46,7 +47,7 @@ public class TextPlainFormatHandler implements FormatHandler {
@Override
public String extractContent(String language, Path file) throws Exception {
return new String(Files.readAllBytes(file), "UTF-8");
return Files.readString(file, StandardCharsets.UTF_8);
}
@Override

View File

@@ -1,6 +1,5 @@
package com.sismics.docs.core.util.format;
import com.google.common.base.Charsets;
import com.google.common.collect.Lists;
import com.google.common.io.ByteStreams;
import com.google.common.io.Closer;
@@ -13,6 +12,7 @@ import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Arrays;
import java.util.List;
@@ -65,7 +65,7 @@ public class VideoFormatHandler implements FormatHandler {
// Consume the data as a string
try (InputStream is = process.getInputStream()) {
return new String(ByteStreams.toByteArray(is), Charsets.UTF_8);
return new String(ByteStreams.toByteArray(is), StandardCharsets.UTF_8);
} catch (Exception e) {
return null;
}

View File

@@ -37,9 +37,9 @@ import org.apache.lucene.search.spell.LuceneDictionary;
import org.apache.lucene.search.suggest.Lookup;
import org.apache.lucene.search.suggest.analyzing.FuzzySuggester;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.NIOFSDirectory;
import org.apache.lucene.store.NoLockFactory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -116,7 +116,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
} else if (luceneStorage.equals("FILE")) {
Path luceneDirectory = DirectoryUtil.getLuceneDirectory();
log.info("Using file Lucene storage: {}", luceneDirectory);
directory = new SimpleFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
directory = new NIOFSDirectory(luceneDirectory, NoLockFactory.INSTANCE);
}
// Create an index writer
@@ -277,7 +277,7 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_ID_C in :documentIdList");
parameterMap.put("documentIdList", documentSearchMap.keySet());
suggestSearchTerms(criteria.getSearch(), suggestionList);
suggestSearchTerms(criteria.getFullSearch(), suggestionList);
}
if (criteria.getCreateDateMin() != null) {
criteriaList.add("d.DOC_CREATEDATE_D >= :createDateMin");
@@ -295,7 +295,11 @@ public class LuceneIndexingHandler implements IndexingHandler {
criteriaList.add("d.DOC_UPDATEDATE_D <= :updateDateMax");
parameterMap.put("updateDateMax", criteria.getUpdateDateMax());
}
if (criteria.getTagIdList() != null && !criteria.getTagIdList().isEmpty()) {
if (criteria.getTitle() != null) {
criteriaList.add("d.DOC_TITLE_C = :title");
parameterMap.put("title", criteria.getTitle());
}
if (!criteria.getTagIdList().isEmpty()) {
int index = 0;
for (List<String> tagIdList : criteria.getTagIdList()) {
List<String> tagCriteriaList = Lists.newArrayList();

View File

@@ -29,6 +29,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.StringReader;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
@@ -87,29 +88,34 @@ public class EmailUtil {
try {
// Build email headers
HtmlEmail email = new HtmlEmail();
email.setCharset("UTF-8");
email.setCharset(StandardCharsets.UTF_8.name());
ConfigDao configDao = new ConfigDao();
// Hostname
String envHostname = System.getenv(Constants.SMTP_HOSTNAME_ENV);
if (envHostname == null) {
if (Strings.isNullOrEmpty(envHostname)) {
email.setHostName(ConfigUtil.getConfigStringValue(ConfigType.SMTP_HOSTNAME));
} else {
email.setHostName(envHostname);
}
// Port
int port = ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT);
String envPort = System.getenv(Constants.SMTP_PORT_ENV);
if (envPort == null) {
email.setSmtpPort(ConfigUtil.getConfigIntegerValue(ConfigType.SMTP_PORT));
} else {
email.setSmtpPort(Integer.valueOf(envPort));
if (!Strings.isNullOrEmpty(envPort)) {
port = Integer.valueOf(envPort);
}
email.setSmtpPort(port);
if (port == 465) {
email.setSSLOnConnect(true);
} else if (port == 587) {
email.setStartTLSRequired(true);
}
// Username and password
String envUsername = System.getenv(Constants.SMTP_USERNAME_ENV);
String envPassword = System.getenv(Constants.SMTP_PASSWORD_ENV);
if (envUsername == null || envPassword == null) {
if (Strings.isNullOrEmpty(envUsername) || Strings.isNullOrEmpty(envPassword)) {
Config usernameConfig = configDao.getById(ConfigType.SMTP_USERNAME);
Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
if (usernameConfig != null && passwordConfig != null) {

View File

@@ -1,6 +1,6 @@
package com.sismics.util;
import org.jsoup.helper.StringUtil;
import org.jsoup.internal.StringUtil;
import org.jsoup.nodes.Element;
import org.jsoup.nodes.Node;
import org.jsoup.nodes.TextNode;
@@ -28,7 +28,7 @@ public class HtmlToPlainText {
}
// the formatting rules, implemented in a breadth-first DOM traverse
private class FormattingVisitor implements NodeVisitor {
static private class FormattingVisitor implements NodeVisitor {
private static final int maxWidth = 80;
private int width = 0;
private StringBuilder accum = new StringBuilder(); // holds the accumulated text
@@ -64,7 +64,7 @@ public class HtmlToPlainText {
return; // don't accumulate long runs of empty spaces
if (text.length() + width > maxWidth) { // won't fit, needs to wrap
String words[] = text.split("\\s+");
String[] words = text.split("\\s+");
for (int i = 0; i < words.length; i++) {
String word = words[i];
boolean last = i == words.length - 1;

View File

@@ -1,6 +1,5 @@
package com.sismics.util;
import com.google.common.base.Charsets;
import com.google.common.hash.Hashing;
import javax.imageio.IIOImage;
@@ -13,6 +12,7 @@ import java.awt.image.BufferedImage;
import java.awt.image.WritableRaster;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.util.Iterator;
/**
@@ -80,7 +80,7 @@ public class ImageUtil {
}
return Hashing.md5().hashString(
email.trim().toLowerCase(), Charsets.UTF_8)
email.trim().toLowerCase(), StandardCharsets.UTF_8)
.toString();
}

View File

@@ -8,6 +8,7 @@ import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.charset.StandardCharsets;
import java.text.MessageFormat;
import java.util.*;
import java.util.jar.JarEntry;
@@ -53,7 +54,7 @@ public class ResourceUtil {
// Extract the JAR path
String jarPath = dirUrl.getPath().substring(5, dirUrl.getPath().indexOf("!"));
JarFile jar = new JarFile(URLDecoder.decode(jarPath, "UTF-8"));
JarFile jar = new JarFile(URLDecoder.decode(jarPath, StandardCharsets.UTF_8));
Set<String> fileSet = new HashSet<String>();
try {

View File

@@ -1,8 +1,8 @@
package com.sismics.util.jpa;
import com.google.common.base.Strings;
import com.sismics.docs.core.util.DirectoryUtil;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import org.hibernate.cfg.Environment;
import org.hibernate.internal.util.config.ConfigurationHelper;
import org.hibernate.service.ServiceRegistry;
import org.slf4j.Logger;
@@ -34,7 +34,6 @@ public final class EMF {
try {
properties = getEntityManagerProperties();
Environment.verifyProperties(properties);
ConfigurationHelper.resolvePlaceHolders(properties);
ServiceRegistry reg = new StandardServiceRegistryBuilder().applySettings(properties).build();
@@ -85,7 +84,7 @@ public final class EMF {
Map<Object, Object> props = new HashMap<>();
Path dbDirectory = DirectoryUtil.getDbDirectory();
String dbFile = dbDirectory.resolve("docs").toAbsolutePath().toString();
if (databaseUrl == null) {
if (Strings.isNullOrEmpty(databaseUrl)) {
props.put("hibernate.connection.driver_class", "org.h2.Driver");
props.put("hibernate.dialect", "org.hibernate.dialect.HSQLDialect");
props.put("hibernate.connection.url", "jdbc:h2:file:" + dbFile + ";CACHE_SIZE=65536;LOCK_TIMEOUT=10000");

View File

@@ -13,7 +13,7 @@ public class MimeType {
public static final String IMAGE_GIF = "image/gif";
public static final String APPLICATION_ZIP = "application/zip";
public static final String APPLICATION_PDF = "application/pdf";
public static final String OPEN_DOCUMENT_TEXT = "application/vnd.oasis.opendocument.text";

View File

@@ -1,15 +1,9 @@
package com.sismics.util.mime;
import com.google.common.base.Charsets;
import org.apache.commons.compress.utils.IOUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.URLConnection;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
* Utility to check MIME types.
@@ -18,7 +12,7 @@ import java.util.zip.ZipInputStream;
*/
public class MimeTypeUtil {
/**
* Try to guess the MIME type of a file by its magic number (header).
* Try to guess the MIME type of a file.
*
* @param file File to inspect
* @param name File name
@@ -26,57 +20,17 @@ public class MimeTypeUtil {
* @throws IOException e
*/
public static String guessMimeType(Path file, String name) throws IOException {
String mimeType;
try (InputStream is = Files.newInputStream(file)) {
byte[] headerBytes = new byte[64];
is.read(headerBytes);
mimeType = guessMimeType(headerBytes, name);
String mimeType = Files.probeContentType(file);
if (mimeType == null && name != null) {
mimeType = URLConnection.getFileNameMap().getContentTypeFor(name);
}
return guessOpenDocumentFormat(mimeType, file);
}
/**
* Try to guess the MIME type of a file by its magic number (header).
*
* @param headerBytes File header (first bytes)
* @param name File name
* @return MIME type
* @throws UnsupportedEncodingException e
*/
public static String guessMimeType(byte[] headerBytes, String name) throws UnsupportedEncodingException {
String header = new String(headerBytes, "US-ASCII");
// Detect by header bytes
if (header.startsWith("PK")) {
return MimeType.APPLICATION_ZIP;
} else if (header.startsWith("GIF87a") || header.startsWith("GIF89a")) {
return MimeType.IMAGE_GIF;
} else if (headerBytes[0] == ((byte) 0xff) && headerBytes[1] == ((byte) 0xd8)) {
return MimeType.IMAGE_JPEG;
} else if (headerBytes[0] == ((byte) 0x89) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x4e) && headerBytes[3] == ((byte) 0x47) &&
headerBytes[4] == ((byte) 0x0d) && headerBytes[5] == ((byte) 0x0a) && headerBytes[6] == ((byte) 0x1a) && headerBytes[7] == ((byte) 0x0a)) {
return MimeType.IMAGE_PNG;
} else if (headerBytes[0] == ((byte) 0x25) && headerBytes[1] == ((byte) 0x50) && headerBytes[2] == ((byte) 0x44) && headerBytes[3] == ((byte) 0x46)) {
return MimeType.APPLICATION_PDF;
} else if (headerBytes[0] == ((byte) 0x00) && headerBytes[1] == ((byte) 0x00) && headerBytes[2] == ((byte) 0x00)
&& (headerBytes[3] == ((byte) 0x14) || headerBytes[3] == ((byte) 0x18) || headerBytes[3] == ((byte) 0x20))
&& headerBytes[4] == ((byte) 0x66) && headerBytes[5] == ((byte) 0x74) && headerBytes[6] == ((byte) 0x79) && headerBytes[7] == ((byte) 0x70)) {
return MimeType.VIDEO_MP4;
} else if (headerBytes[0] == ((byte) 0x1a) && headerBytes[1] == ((byte) 0x45) && headerBytes[2] == ((byte) 0xdf) && headerBytes[3] == ((byte) 0xa3)) {
return MimeType.VIDEO_WEBM;
if (mimeType == null) {
return MimeType.DEFAULT;
}
// Detect by file extension
if (name != null) {
if (name.endsWith(".txt")) {
return MimeType.TEXT_PLAIN;
} else if (name.endsWith(".csv")) {
return MimeType.TEXT_CSV;
}
}
return MimeType.DEFAULT;
return mimeType;
}
/**
@@ -113,52 +67,4 @@ public class MimeTypeUtil {
return "bin";
}
}
/**
* Guess the MIME type of open document formats (docx and odt).
* It's more costly than the simple header check, but needed because open document formats
* are simple ZIP files on the outside and much bigger on the inside.
*
* @param mimeType Currently detected MIME type
* @param file File on disk
* @return MIME type
*/
private static String guessOpenDocumentFormat(String mimeType, Path file) {
if (!MimeType.APPLICATION_ZIP.equals(mimeType)) {
// open document formats are ZIP files
return mimeType;
}
try (InputStream inputStream = Files.newInputStream(file);
ZipInputStream zipInputStream = new ZipInputStream(inputStream, Charsets.ISO_8859_1)) {
ZipEntry archiveEntry = zipInputStream.getNextEntry();
while (archiveEntry != null) {
if (archiveEntry.getName().equals("mimetype")) {
// Maybe it's an ODT file
String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
if (MimeType.OPEN_DOCUMENT_TEXT.equals(content.trim())) {
mimeType = MimeType.OPEN_DOCUMENT_TEXT;
break;
}
} else if (archiveEntry.getName().equals("[Content_Types].xml")) {
// Maybe it's a DOCX file
String content = new String(IOUtils.toByteArray(zipInputStream), Charsets.ISO_8859_1);
if (content.contains(MimeType.OFFICE_DOCUMENT)) {
mimeType = MimeType.OFFICE_DOCUMENT;
break;
} else if (content.contains(MimeType.OFFICE_PRESENTATION)) {
mimeType = MimeType.OFFICE_PRESENTATION;
break;
}
}
archiveEntry = zipInputStream.getNextEntry();
}
} catch (Exception e) {
// In case of any error, just give up and keep the ZIP MIME type
return mimeType;
}
return mimeType;
}
}

View File

@@ -1 +1 @@
db.version=24
db.version=27

View File

@@ -41,4 +41,4 @@ insert into T_LOCALE(LOC_ID_C) values('fr');
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('admin', 'Admin', NOW());
insert into T_ROLE(ROL_ID_C, ROL_NAME_C, ROL_CREATEDATE_D) values('user', 'User', NOW());
insert into T_ROLE_BASE_FUNCTION(RBF_ID_C, RBF_IDROLE_C, RBF_IDBASEFUNCTION_C, RBF_CREATEDATE_D) values('admin_ADMIN', 'admin', 'ADMIN', NOW());
insert into T_USER(USE_ID_C, USE_IDLOCALE_C, USE_IDROLE_C, USE_USERNAME_C, USE_PASSWORD_C, USE_EMAIL_C, USE_THEME_C, USE_FIRSTCONNECTION_B, USE_CREATEDATE_D, USE_PRIVATEKEY_C) values('admin', 'en', 'admin', 'admin', '$2a$05$6Ny3TjrW3aVAL1or2SlcR.fhuDgPKp5jp.P9fBXwVNePgeLqb4i3C', 'admin@localhost', 'default.less', true, NOW(), 'AdminPk');
insert into T_USER(USE_ID_C, USE_IDLOCALE_C, USE_IDROLE_C, USE_USERNAME_C, USE_PASSWORD_C, USE_EMAIL_C, USE_THEME_C, USE_FIRSTCONNECTION_B, USE_CREATEDATE_D, USE_PRIVATEKEY_C) values('admin', 'en', 'admin', 'admin', '$2y$10$xg0EEKVUehutDI1m6qQhVeFz7SMQMl1jQzjf2KkVsR2c7aV2vyyjK', 'admin@localhost', 'default.less', true, NOW(), 'AdminPk');

View File

@@ -0,0 +1,3 @@
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_AUTOMATIC_TAGS', 'false');
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_DELETE_IMPORTED', 'false');
update T_CONFIG set CFG_VALUE_C = '25' where CFG_ID_C = 'DB_VERSION';

View File

@@ -0,0 +1,2 @@
!PGSQL!UPDATE t_file SET fil_content_c = convert_from(loread(lo_open(fil_content_c::int, CAST( x'20000' AS integer)), 999999999), 'UNICODE')::TEXT WHERE fil_content_c IS NOT NULL;
update T_CONFIG set CFG_VALUE_C = '26' where CFG_ID_C = 'DB_VERSION';

View File

@@ -0,0 +1,2 @@
insert into T_CONFIG(CFG_ID_C, CFG_VALUE_C) values('INBOX_FOLDER', 'INBOX');
update T_CONFIG set CFG_VALUE_C = '27' where CFG_ID_C = 'DB_VERSION';

View File

@@ -1,10 +0,0 @@
email.template.password_recovery.subject=Bitte setzen Sie ihr Passwort zur\u00FCck
email.template.password_recovery.hello=Hallo {0}.
email.template.password_recovery.instruction1=Wir haben eine Anfrage zum Zur\u00FCcksetzen Ihres Passworts erhalten.<br/>Wenn Sie keine Hilfe angefordert haben, k\u00F6nnen Sie diese E-Mail einfach ignorieren.
email.template.password_recovery.instruction2=Um Ihr Passwort zur\u00FCckzusetzen, besuchen Sie bitte den folgenden Link:
email.template.password_recovery.click_here=Klicken Sie hier, um Ihr Passwort zur\u00FCckzusetzen
email.template.route_step_validate.subject=Ein Dokument braucht Ihre Aufmerksamkeit
email.template.route_step_validate.hello=Hallo {0}.
email.template.route_step_validate.instruction1=Ihnen wurde ein Workflow-Schritt zugewiesen, der Ihre Aufmerksamkeit erfordert.
email.template.route_step_validate.instruction2=Um das Dokument anzuzeigen und den Workflow zu \u00FCberpr\u00FCfen, besuchen Sie bitte den folgenden Link:
email.no_html.error=Ihr E-Mail-Client unterst\u00FCtzt keine HTML-Nachrichten

View File

@@ -1,10 +0,0 @@
email.template.password_recovery.subject=R\u00E9initialiser votre mot de passe
email.template.password_recovery.hello=Bonjour {0}.
email.template.password_recovery.instruction1=Nous avons re\u00E7u une demande de r\u00E9initialisation de mot de passe.<br/>Si vous n'avez rien demand\u00E9, vous pouvez ignorer cet mail.
email.template.password_recovery.instruction2=Pour r\u00E9initialiser votre mot de passe, cliquez sur le lien ci-dessous :
email.template.password_recovery.click_here=Cliquez ici pour r\u00E9initialiser votre mot de passe.
email.template.route_step_validate.subject=Un document n\u00E9cessite votre attention
email.template.route_step_validate.hello=Bonjour {0}.
email.template.route_step_validate.instruction1=Une \u00E9tape de workflow vous a \u00E9t\u00E9 attribu\u00E9e et n\u00E9cessite votre attention.
email.template.route_step_validate.instruction2=Pour voir le document et valider le workflow, veuillez visiter le lien ci-dessous :
email.no_html.error=Votre client mail ne supporte pas les messages HTML

View File

@@ -1,10 +0,0 @@
email.template.password_recovery.subject=\u8BF7\u91CD\u7F6E\u60A8\u7684\u5BC6\u7801
email.template.password_recovery.hello=\u60A8\u597D {0}.
email.template.password_recovery.instruction1=\u6211\u4EEC\u6536\u5230\u4E86\u4E00\u4E2A\u91CD\u7F6E\u60A8\u7684\u5BC6\u7801\u7684\u8BF7\u6C42\u3002<br/>\u5982\u679C\u60A8\u6CA1\u6709\u53D1\u9001\u8BE5\u8BF7\u6C42\uFF0C\u8BF7\u5FFD\u7565\u6B64\u7535\u5B50\u90AE\u4EF6
email.template.password_recovery.instruction2=\u8981\u91CD\u7F6E\u60A8\u7684\u5BC6\u7801\uFF0C\u8BF7\u8BBF\u95EE\u4EE5\u4E0B\u94FE\u63A5\uFF1A
email.template.password_recovery.click_here=\u8BF7\u70B9\u51FB\u6B64\u5904\u91CD\u7F6E\u60A8\u7684\u5BC6\u7801
email.template.route_step_validate.subject=\u4E00\u4EFD\u6587\u4EF6\u9700\u8981\u4F60\u7684\u5173\u6CE8
email.template.route_step_validate.hello={0}\uFF0C\u60A8\u597D.
email.template.route_step_validate.instruction1=\u5DE5\u4F5C\u6D41\u6B65\u9AA4\u5DF2\u7ECF\u5206\u914D\u7ED9\u60A8\uFF0C\u9700\u8981\u60A8\u7684\u5173\u6CE8\u3002
email.template.route_step_validate.instruction2=\u8981\u67E5\u770B\u6587\u6863\u5E76\u9A8C\u8BC1\u5DE5\u4F5C\u6D41\u7A0B\uFF0C\u8BF7\u8BBF\u95EE\u4EE5\u4E0B\u94FE\u63A5\uFF1A
email.no_html.error=\u60A8\u7684\u7535\u5B50\u90AE\u4EF6\u5BA2\u6237\u7AEF\u4E0D\u652F\u6301HTML\u683C\u5F0F\u90AE\u4EF6

View File

@@ -1,10 +0,0 @@
email.template.password_recovery.subject=\u8ACB\u91CD\u65B0\u8A2D\u7F6E\u60A8\u7684\u5BC6\u78BC
email.template.password_recovery.hello=\u60A8\u597D{0}\uFF01
email.template.password_recovery.instruction1=\u6211\u5011\u6536\u5230\u4E86\u91CD\u7F6E\u5BC6\u78BC\u7684\u8ACB\u6C42\u3002<br/>\u5982\u679C\u60A8\u6C92\u6709\u8ACB\u6C42\u5E6B\u52A9\uFF0C\u8ACB\u5FFD\u7565\u6B64\u96FB\u5B50\u90F5\u4EF6\u3002
email.template.password_recovery.instruction2=\u8981\u91CD\u7F6E\u60A8\u7684\u5BC6\u78BC\uFF0C\u8ACB\u8A2A\u554F\u4EE5\u4E0B\u93C8\u63A5\uFF1A
email.template.password_recovery.click_here=\u9EDE\u64CA\u9019\u88E1\u91CD\u7F6E\u60A8\u7684\u5BC6\u78BC
email.template.route_step_validate.subject=\u4E00\u4EFD\u6587\u4EF6\u9700\u8981\u4F60\u7684\u95DC\u6CE8
email.template.route_step_validate.hello={0}\uFF0C\u60A8\u597D.
email.template.route_step_validate.instruction1=\u5DE5\u4F5C\u6D41\u6B65\u9A5F\u5DF2\u7D93\u5206\u914D\u7D66\u60A8\uFF0C\u9700\u8981\u60A8\u7684\u95DC\u6CE8\u3002
email.template.route_step_validate.instruction2=\u8981\u67E5\u770B\u6587\u6A94\u4E26\u9A57\u8B49\u5DE5\u4F5C\u6D41\u7A0B\uFF0C\u8ACB\u8A2A\u554F\u4EE5\u4E0B\u93C8\u63A5\uFF1A
email.no_html.error=\u60A8\u7684\u96FB\u5B50\u90F5\u4EF6\u5BA2\u6236\u7AEF\u4E0D\u652F\u6301HTML\u683C\u5F0F\u90F5\u4EF6

View File

@@ -0,0 +1,10 @@
email.template.password_recovery.subject=Proszę zresetować swoje hasło
email.template.password_recovery.hello=Witaj {0}.
email.template.password_recovery.instruction1=Otrzymaliśmy żądanie zresetowania twojego hasła.<br/>Jeśli to nie ty potrzebujesz pomocy, możesz zignorować ten email.
email.template.password_recovery.instruction2=Aby zresetować swoje hasło, proszę naciśnij link poniżej:
email.template.password_recovery.click_here=Naciśnij, aby zresetować swoje hasło
email.template.route_step_validate.subject=Dokument potrzebuje twojej uwagi
email.template.route_step_validate.hello=Witaj {0}.
email.template.route_step_validate.instruction1=Został Ci przypisany etap przepływu i wymaga Twojej uwagi.
email.template.route_step_validate.instruction2=Aby wyświetlić dokument i zweryfikować przepływ pracy, kliknij poniższy link:
email.no_html.error=Twój klient poczty e-mail nie obsługuje wiadomości HTML

View File

@@ -137,7 +137,6 @@ public class TestFileUtil {
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
PdfUtil.convertToPdf(documentDto, Lists.newArrayList(file0, file1, file2, file3, file4, file5), true, true, 10, outputStream);
Assert.assertTrue(outputStream.toByteArray().length > 0);
com.google.common.io.Files.write(outputStream.toByteArray(), new java.io.File("C:\\Users\\Jendib\\Downloads\\test.pdf"));
}
}
}

View File

@@ -15,7 +15,7 @@ import java.nio.file.Paths;
*/
public class TestMimeTypeUtil {
@Test
public void guessOpenDocumentFormatTest() throws Exception {
public void test() throws Exception {
// Detect ODT files
Path path = Paths.get(ClassLoader.getSystemResource("file/document.odt").toURI());
Assert.assertEquals(MimeType.OPEN_DOCUMENT_TEXT, MimeTypeUtil.guessMimeType(path, "document.odt"));
@@ -27,5 +27,45 @@ public class TestMimeTypeUtil {
// Detect PPTX files
path = Paths.get(ClassLoader.getSystemResource("file/apache.pptx").toURI());
Assert.assertEquals(MimeType.OFFICE_PRESENTATION, MimeTypeUtil.guessMimeType(path, "apache.pptx"));
// Detect XLSX files
path = Paths.get(ClassLoader.getSystemResource("file/document.xlsx").toURI());
Assert.assertEquals(MimeType.OFFICE_SHEET, MimeTypeUtil.guessMimeType(path, "document.xlsx"));
// Detect TXT files
path = Paths.get(ClassLoader.getSystemResource("file/document.txt").toURI());
Assert.assertEquals(MimeType.TEXT_PLAIN, MimeTypeUtil.guessMimeType(path, "document.txt"));
// Detect CSV files
path = Paths.get(ClassLoader.getSystemResource("file/document.csv").toURI());
Assert.assertEquals(MimeType.TEXT_CSV, MimeTypeUtil.guessMimeType(path, "document.csv"));
// Detect PDF files
path = Paths.get(ClassLoader.getSystemResource("file/udhr.pdf").toURI());
Assert.assertEquals(MimeType.APPLICATION_PDF, MimeTypeUtil.guessMimeType(path, "udhr.pdf"));
// Detect JPEG files
path = Paths.get(ClassLoader.getSystemResource("file/apollo_portrait.jpg").toURI());
Assert.assertEquals(MimeType.IMAGE_JPEG, MimeTypeUtil.guessMimeType(path, "apollo_portrait.jpg"));
// Detect GIF files
path = Paths.get(ClassLoader.getSystemResource("file/image.gif").toURI());
Assert.assertEquals(MimeType.IMAGE_GIF, MimeTypeUtil.guessMimeType(path, "image.gif"));
// Detect PNG files
path = Paths.get(ClassLoader.getSystemResource("file/image.png").toURI());
Assert.assertEquals(MimeType.IMAGE_PNG, MimeTypeUtil.guessMimeType(path, "image.png"));
// Detect ZIP files
path = Paths.get(ClassLoader.getSystemResource("file/document.zip").toURI());
Assert.assertEquals(MimeType.APPLICATION_ZIP, MimeTypeUtil.guessMimeType(path, "document.zip"));
// Detect WEBM files
path = Paths.get(ClassLoader.getSystemResource("file/video.webm").toURI());
Assert.assertEquals(MimeType.VIDEO_WEBM, MimeTypeUtil.guessMimeType(path, "video.webm"));
// Detect MP4 files
path = Paths.get(ClassLoader.getSystemResource("file/video.mp4").toURI());
Assert.assertEquals(MimeType.VIDEO_MP4, MimeTypeUtil.guessMimeType(path, "video.mp4"));
}
}

View File

@@ -0,0 +1,2 @@
col1,col2
test,me

View File

@@ -0,0 +1 @@
test me.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@@ -6,4 +6,5 @@ log4j.appender.MEMORY=com.sismics.util.log4j.MemoryAppender
log4j.appender.MEMORY.size=1000
log4j.logger.com.sismics=INFO
log4j.logger.org.hibernate=ERROR
log4j.logger.org.hibernate=ERROR
log4j.logger.org.apache.directory=ERROR

docs-importer/Dockerfile Normal file
View File

@@ -0,0 +1,16 @@
FROM node:14.2-alpine AS builder
WORKDIR /build
COPY main.js package-lock.json package.json ./
RUN npm install && npm install -g pkg
RUN pkg -t node14-alpine-x64 .
FROM alpine
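# Placeholder defaults below; TEEDY_URL, TEEDY_USERNAME and TEEDY_PASSWORD must be overridden at run time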
ENV TEEDY_TAG= TEEDY_ADDTAGS=false TEEDY_LANG=eng TEEDY_URL='http://localhost:8080' TEEDY_USERNAME=username TEEDY_PASSWORD=password TEEDY_COPYFOLDER= TEEDY_FILEFILTER=*
RUN apk add --no-cache \
libc6-compat \
libstdc++
ADD pref /root/.config/preferences/com.sismics.docs.importer.pref
ADD env.sh /
COPY --from=builder /build/teedy-importer ./
CMD ["/bin/ash","-c","/env.sh && /teedy-importer -d"]

View File

@@ -1,35 +1,53 @@
File Importer
=============
# File Importer
This tool can be used for a one-time import of files or to periodically scan an input folder for new files.
Downloads
---------
## Downloads
Built binaries for Windows/Linux/MacOSX can be found at <https://github.com/sismics/docs/releases>
Usage
-----
## Usage
```console
./docs-importer-macos (for MacOSX)
./docs-importer-linux (for Linux)
docs-importer-win.exe (for Windows)
```
A wizard will ask you for the import configuration and write it in `~/.config/preferences/com.sismics.docs.importer.pref`
A wizard will ask you for the import configuration and write it in `~/.config/preferences/com.sismics.docs.importer.pref`.
Words following a `#` in the filename will be added as tags to the document, if a tag with the same name exists on the server.
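For example, assuming a tag named `taxes` already exists on the server, a file named `invoice #taxes.pdf` is imported with the `taxes` tag attached and the `#taxes` part stripped from the document title.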
For the next start, pass the `-d` argument to skip the wizard:
```console
./docs-importer-linux -d
```
Daemon mode
-----------
The daemon mode scan the input directory every 30 seconds for new files. Once a file is found and imported, it is **deleted**.
## Daemon mode
The daemon mode scans the input directory every 30 seconds for new files. Once a file is found and imported, it is **deleted**. You can set a `copyFolder` to which files are copied before they are deleted.
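For example, with `copyFolder` set to `/backup/`, an imported `invoice.pdf` is first copied to `/backup/invoice.pdf` and only then removed from the input folder.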
## Docker
The Docker image needs a previously generated preference file mounted as a volume at `/root/.config/preferences/com.sismics.docs.importer.pref`. The container will start the importer in daemon mode and look for files in `/import`.
Example usage:
```
docker run --name teedy-import -d -v /path/to/preferencefile:/root/.config/preferences/com.sismics.docs.importer.pref -v /path/to/import/folder:/import sismics/docs-importer:latest
```
### Environment variables
Instead of mounting the preferences file, the options can also be configured through the environment variables `TEEDY_TAG`, `TEEDY_ADDTAGS`, `TEEDY_LANG`, `TEEDY_COPYFOLDER`, `TEEDY_FILEFILTER`, `TEEDY_URL`, `TEEDY_USERNAME` and `TEEDY_PASSWORD`.
The last three have to be set for the importer to work. `TEEDY_TAG` has to be set to the UUID of the tag, not its name (the UUID can be found by visiting `baseUrl/api/tag/list` in your browser).
Example usage:
```
docker run --name teedy-import -d -e TEEDY_TAG=2071fdf7-0e26-409d-b53d-f25823a5eb9e -e TEEDY_ADDTAGS=false -e TEEDY_LANG=eng -e TEEDY_URL='http://teedy.example.com:port' -e TEEDY_USERNAME=username -e TEEDY_PASSWORD=superSecretPassword -v /path/to/import/folder:/import sismics/docs-importer:latest
```
## Build from sources
Build from sources
------------------
```console
npm install
npm install -g pkg
pkg .
```
```

docs-importer/env.sh Executable file
View File

@@ -0,0 +1,11 @@
#!/bin/ash
file=/root/.config/preferences/com.sismics.docs.importer.pref
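# Replace the env1..env8 placeholders in the preference template with the TEEDY_* variables passed to the container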
sed -i "s/env1/$TEEDY_TAG/g" $file
sed -i "s/env2/$TEEDY_ADDTAGS/g" $file
sed -i "s/env3/$TEEDY_LANG/g" $file
sed -i "s,env4,$TEEDY_URL,g" $file
sed -i "s/env5/$TEEDY_USERNAME/g" $file
sed -i "s/env6/$TEEDY_PASSWORD/g" $file
sed -i "s,env7,$TEEDY_COPYFOLDER,g" $file
sed -i "s,env8,$TEEDY_FILEFILTER,g" $file
echo "Environment variables replaced"

View File

@@ -1,6 +1,7 @@
'use strict';
const recursive = require('recursive-readdir');
const minimatch = require("minimatch");
const ora = require('ora');
const inquirer = require('inquirer');
const preferences = require('preferences');
@@ -10,6 +11,7 @@ const _ = require('underscore');
const request = require('request').defaults({
jar: true
});
const qs = require('querystring');
// Load preferences
const prefs = new preferences('com.sismics.docs.importer',{
@@ -22,7 +24,7 @@ const prefs = new preferences('com.sismics.docs.importer',{
});
// Welcome message
console.log('Teedy Importer 1.0.0, https://teedy.io' +
console.log('Teedy Importer 1.9, https://teedy.io' +
'\n\n' +
'This program let you import files from your system to Teedy' +
'\n');
@@ -141,13 +143,32 @@ const askPath = () => {
recursive(answers.path, function (error, files) {
spinner.succeed(files.length + ' files in this directory');
askTag();
askFileFilter();
});
});
});
});
};
// Ask for the file filter
const askFileFilter = () => {
console.log('');
inquirer.prompt([
{
type: 'input',
name: 'fileFilter',
message: 'What pattern do you want to use to match files? (eg. *.+(pdf|txt|jpg))',
default: prefs.importer.fileFilter || "*"
}
]).then(answers => {
// Save fileFilter
prefs.importer.fileFilter = answers.fileFilter;
askTag();
});
};
// Ask for the tag to add
const askTag = () => {
console.log('');
@@ -176,7 +197,7 @@ const askTag = () => {
{
type: 'list',
name: 'tag',
message: 'Which tag to add on imported documents?',
message: 'Which tag to add to all imported documents?',
default: defaultTagName,
choices: [ 'No tag' ].concat(_.pluck(tags, 'name'))
}
@@ -184,11 +205,109 @@ const askTag = () => {
// Save tag
prefs.importer.tag = answers.tag === 'No tag' ?
'' : _.findWhere(tags, { name: answers.tag }).id;
askDaemon();
askAddTag();
});
});
};
const askAddTag = () => {
console.log('');
inquirer.prompt([
{
type: 'confirm',
name: 'addtags',
message: 'Do you want to add tags from the filename given with # ?',
default: prefs.importer.addtags === true
}
]).then(answers => {
// Save addtags
prefs.importer.addtags = answers.addtags;
// Save all preferences in case the program is sig-killed
askLang();
});
}
const askLang = () => {
console.log('');
// Load tags
const spinner = ora({
text: 'Loading default language',
spinner: 'flips'
}).start();
request.get({
url: prefs.importer.baseUrl + '/api/app',
}, function (error, response, body) {
if (error || !response || response.statusCode !== 200) {
spinner.fail('Connection to Teedy failed: ' + error);
askLang();
return;
}
spinner.succeed('Language loaded');
const defaultLang = prefs.importer.lang ? prefs.importer.lang : JSON.parse(body).default_language;
inquirer.prompt([
{
type: 'input',
name: 'lang',
message: 'Which should be the default language of the document?',
default: defaultLang
}
]).then(answers => {
// Save language
prefs.importer.lang = answers.lang
askCopyFolder();
});
});
};
const askCopyFolder = () => {
console.log('');
inquirer.prompt([
{
type: 'input',
name: 'copyFolder',
message: 'Enter a path to copy files before they are deleted or leave empty to disable. The path must end with a \'/\' on MacOS and Linux or with a \'\\\' on Windows. Entering \'undefined\' will disable this again after setting the folder.',
default: prefs.importer.copyFolder
}
]).then(answers => {
// Save path
prefs.importer.copyFolder = answers.copyFolder=='undefined' ? '' : answers.copyFolder;
if (prefs.importer.copyFolder) {
// Test path
const spinner = ora({
text: 'Checking copy folder path',
spinner: 'flips'
}).start();
fs.lstat(answers.copyFolder, (error, stats) => {
if (error || !stats.isDirectory()) {
spinner.fail('Please enter a valid directory path');
askCopyFolder();
return;
}
fs.access(answers.copyFolder, fs.W_OK | fs.R_OK, (error) => {
if (error) {
spinner.fail('This directory is not writable');
askCopyFolder();
return;
}
spinner.succeed('Copy folder set!');
askDaemon();
});
});
}
else {askDaemon();}
});
};
// Ask for daemon mode
const askDaemon = () => {
console.log('');
@@ -245,6 +364,8 @@ const start = () => {
// Import the files
const importFiles = (remove, filesImported) => {
recursive(prefs.importer.path, function (error, files) {
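// Keep only files whose basename matches the configured glob pattern (fileFilter defaults to '*', i.e. all files)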
files = files.filter(minimatch.filter(prefs.importer.fileFilter || '*', { matchBase: true }));
if (files.length === 0) {
filesImported();
return;
@@ -270,37 +391,94 @@ const importFile = (file, remove, resolve) => {
spinner: 'flips'
}).start();
request.put({
url: prefs.importer.baseUrl + '/api/document',
form: {
title: file.replace(/^.*[\\\/]/, ''),
language: 'eng',
tags: prefs.importer.tag === '' ? undefined : prefs.importer.tag
}
}, function (error, response, body) {
// Remove path of file
let filename = file.replace(/^.*[\\\/]/, '');
// Get Tags given as hashtags from filename
let taglist = filename.match(/#[^\s:#]+/mg);
taglist = taglist ? taglist.map(s => s.substr(1)) : [];
// Get available tags and UUIDs from server
request.get({
url: prefs.importer.baseUrl + '/api/tag/list',
}, function (error, response, body) {
if (error || !response || response.statusCode !== 200) {
spinner.fail('Upload failed for ' + file + ': ' + error);
resolve();
spinner.fail('Error loading tags');
return;
}
let tagsarray = {};
for (let l of JSON.parse(body).tags) {
tagsarray[l.name] = l.id;
}
request.put({
url: prefs.importer.baseUrl + '/api/file',
formData: {
id: JSON.parse(body).id,
file: fs.createReadStream(file)
// Intersect tags from filename with existing tags on server
let foundtags = [];
for (let j of taglist) {
// If the tag is last in the filename it could include a file extension and would not be recognized
if (j.includes('.') && !tagsarray.hasOwnProperty(j) && !foundtags.includes(tagsarray[j])) {
while (j.includes('.') && !tagsarray.hasOwnProperty(j)) {
j = j.replace(/\.[^.]*$/,'');
}
}
}, function (error, response) {
if (tagsarray.hasOwnProperty(j) && !foundtags.includes(tagsarray[j])) {
foundtags.push(tagsarray[j]);
filename = filename.split('#'+j).join('');
}
}
if (prefs.importer.tag !== '' && !foundtags.includes(prefs.importer.tag)){
foundtags.push(prefs.importer.tag);
}
let data = {}
if (prefs.importer.addtags) {
data = {
title: prefs.importer.addtags ? filename : file.replace(/^.*[\\\/]/, '').substring(0, 100),
language: prefs.importer.lang || 'eng',
tags: foundtags
}
}
else {
data = {
title: prefs.importer.addtags ? filename : file.replace(/^.*[\\\/]/, '').substring(0, 100),
language: prefs.importer.lang || 'eng',
tags: prefs.importer.tag === '' ? undefined : prefs.importer.tag
}
}
// Create document
request.put({
url: prefs.importer.baseUrl + '/api/document',
form: qs.stringify(data)
}, function (error, response, body) {
if (error || !response || response.statusCode !== 200) {
spinner.fail('Upload failed for ' + file + ': ' + error);
resolve();
return;
}
spinner.succeed('Upload successful for ' + file);
if (remove) {
fs.unlinkSync(file);
}
resolve();
// Upload file
request.put({
url: prefs.importer.baseUrl + '/api/file',
formData: {
id: JSON.parse(body).id,
file: fs.createReadStream(file)
}
}, function (error, response) {
if (error || !response || response.statusCode !== 200) {
spinner.fail('Upload failed for ' + file + ': ' + error);
resolve();
return;
}
spinner.succeed('Upload successful for ' + file);
if (remove) {
if (prefs.importer.copyFolder) {
fs.copyFileSync(file, prefs.importer.copyFolder + file.replace(/^.*[\\\/]/, ''));
fs.unlinkSync(file);
}
else {fs.unlinkSync(file);}
}
resolve();
});
});
});
};
@@ -312,7 +490,12 @@ if (argv.hasOwnProperty('d')) {
'Username: ' + prefs.importer.username + '\n' +
'Password: ***********\n' +
'Tag: ' + prefs.importer.tag + '\n' +
'Daemon mode: ' + prefs.importer.daemon);
'Add tags given #: ' + prefs.importer.addtags + '\n' +
'Language: ' + prefs.importer.lang + '\n' +
'Daemon mode: ' + prefs.importer.daemon + '\n' +
'Copy folder: ' + prefs.importer.copyFolder + '\n' +
'File filter: ' + prefs.importer.fileFilter
);
start();
} else {
askBaseUrl();

View File

@@ -1,6 +1,6 @@
{
"name": "docs-importer",
"version": "1.5.1",
"name": "teedy-importer",
"version": "1.9.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@@ -9,10 +9,10 @@
"resolved": "https://registry.npmjs.org/ajv/-/ajv-5.5.2.tgz",
"integrity": "sha1-c7Xuyj+rZT49P5Qis0GtQiBdyWU=",
"requires": {
"co": "4.6.0",
"fast-deep-equal": "1.1.0",
"fast-json-stable-stringify": "2.0.0",
"json-schema-traverse": "0.3.1"
"co": "^4.6.0",
"fast-deep-equal": "^1.0.0",
"fast-json-stable-stringify": "^2.0.0",
"json-schema-traverse": "^0.3.0"
}
},
"ansi-escapes": {
@@ -30,7 +30,7 @@
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.0.tgz",
"integrity": "sha512-NnSOmMEYtVR2JVMIGTzynRkkaxtiq1xnFBcdQD/DnNCYPoEPsVJhM98BDyaoNOQIi7p4okdi3E27eN7GQbsUug==",
"requires": {
"color-convert": "1.9.1"
"color-convert": "^1.9.0"
}
},
"argparse": {
@@ -38,7 +38,7 @@
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
"requires": {
"sprintf-js": "1.0.3"
"sprintf-js": "~1.0.2"
}
},
"asn1": {
@@ -75,9 +75,8 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.1.tgz",
"integrity": "sha1-Y7xdy2EzG5K8Bf1SiVPDNGKgb40=",
"optional": true,
"requires": {
"tweetnacl": "0.14.5"
"tweetnacl": "^0.14.3"
}
},
"boom": {
@@ -85,7 +84,7 @@
"resolved": "https://registry.npmjs.org/boom/-/boom-4.3.1.tgz",
"integrity": "sha1-T4owBctKfjiJ90kDD9JbluAdLjE=",
"requires": {
"hoek": "4.2.1"
"hoek": "4.x.x"
}
},
"brace-expansion": {
@@ -93,7 +92,7 @@
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": {
"balanced-match": "1.0.0",
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
}
},
@@ -107,9 +106,9 @@
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.3.1.tgz",
"integrity": "sha512-QUU4ofkDoMIVO7hcx1iPTISs88wsO8jA92RQIm4JAwZvFGGAV2hSAA1NX7oVj2Ej2Q6NDTcRDjPTFrMCRZoJ6g==",
"requires": {
"ansi-styles": "3.2.0",
"escape-string-regexp": "1.0.5",
"supports-color": "5.2.0"
"ansi-styles": "^3.2.0",
"escape-string-regexp": "^1.0.5",
"supports-color": "^5.2.0"
}
},
"chardet": {
@@ -122,7 +121,7 @@
"resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz",
"integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=",
"requires": {
"restore-cursor": "2.0.0"
"restore-cursor": "^2.0.0"
}
},
"cli-spinners": {
@@ -150,7 +149,7 @@
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz",
"integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==",
"requires": {
"color-name": "1.1.3"
"color-name": "^1.1.1"
}
},
"color-name": {
@@ -163,7 +162,7 @@
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.6.tgz",
"integrity": "sha1-cj599ugBrFYTETp+RFqbactjKBg=",
"requires": {
"delayed-stream": "1.0.0"
"delayed-stream": "~1.0.0"
}
},
"concat-map": {
@@ -181,7 +180,7 @@
"resolved": "https://registry.npmjs.org/cryptiles/-/cryptiles-3.1.2.tgz",
"integrity": "sha1-qJ+7Ig9c4l7FboxKqKT9e1sNKf4=",
"requires": {
"boom": "5.2.0"
"boom": "5.x.x"
},
"dependencies": {
"boom": {
@@ -189,7 +188,7 @@
"resolved": "https://registry.npmjs.org/boom/-/boom-5.2.0.tgz",
"integrity": "sha512-Z5BTk6ZRe4tXXQlkqftmsAUANpXmuwlsF5Oov8ThoMbQRzdGTA1ngYRW160GexgOgjsFOKJz0LYhoNi+2AMBUw==",
"requires": {
"hoek": "4.2.1"
"hoek": "4.x.x"
}
}
}
@@ -199,7 +198,7 @@
"resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz",
"integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=",
"requires": {
"assert-plus": "1.0.0"
"assert-plus": "^1.0.0"
}
},
"defaults": {
@@ -207,7 +206,7 @@
"resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.3.tgz",
"integrity": "sha1-xlYFHpgX2f8I7YgUd/P+QBnz730=",
"requires": {
"clone": "1.0.3"
"clone": "^1.0.2"
}
},
"delayed-stream": {
@@ -219,9 +218,8 @@
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.1.tgz",
"integrity": "sha1-D8c6ntXw1Tw4GTOYUj735UN3dQU=",
"optional": true,
"requires": {
"jsbn": "0.1.1"
"jsbn": "~0.1.0"
}
},
"escape-string-regexp": {
@@ -244,9 +242,9 @@
"resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.1.0.tgz",
"integrity": "sha512-E44iT5QVOUJBKij4IIV3uvxuNlbKS38Tw1HiupxEIHPv9qtC2PrDYohbXV5U+1jnfIXttny8gUhj+oZvflFlzA==",
"requires": {
"chardet": "0.4.2",
"iconv-lite": "0.4.19",
"tmp": "0.0.33"
"chardet": "^0.4.0",
"iconv-lite": "^0.4.17",
"tmp": "^0.0.33"
}
},
"extsprintf": {
@@ -269,7 +267,7 @@
"resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz",
"integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=",
"requires": {
"escape-string-regexp": "1.0.5"
"escape-string-regexp": "^1.0.5"
}
},
"forever-agent": {
@@ -282,9 +280,9 @@
"resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.2.tgz",
"integrity": "sha1-SXBJi+YEwgwAXU9cI67NIda0kJk=",
"requires": {
"asynckit": "0.4.0",
"asynckit": "^0.4.0",
"combined-stream": "1.0.6",
"mime-types": "2.1.18"
"mime-types": "^2.1.12"
}
},
"getpass": {
@@ -292,7 +290,7 @@
"resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz",
"integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=",
"requires": {
"assert-plus": "1.0.0"
"assert-plus": "^1.0.0"
}
},
"graceful-fs": {
@@ -310,8 +308,8 @@
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz",
"integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=",
"requires": {
"ajv": "5.5.2",
"har-schema": "2.0.0"
"ajv": "^5.1.0",
"har-schema": "^2.0.0"
}
},
"has-flag": {
@@ -324,10 +322,10 @@
"resolved": "https://registry.npmjs.org/hawk/-/hawk-6.0.2.tgz",
"integrity": "sha512-miowhl2+U7Qle4vdLqDdPt9m09K6yZhkLDTWGoUiUzrQCn+mHHSmfJgAyGaLRZbPmTqfFFjRV1QWCW0VWUJBbQ==",
"requires": {
"boom": "4.3.1",
"cryptiles": "3.1.2",
"hoek": "4.2.1",
"sntp": "2.1.0"
"boom": "4.x.x",
"cryptiles": "3.x.x",
"hoek": "4.x.x",
"sntp": "2.x.x"
}
},
"hoek": {
@@ -340,9 +338,9 @@
"resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz",
"integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=",
"requires": {
"assert-plus": "1.0.0",
"jsprim": "1.4.1",
"sshpk": "1.13.1"
"assert-plus": "^1.0.0",
"jsprim": "^1.2.2",
"sshpk": "^1.7.0"
}
},
"iconv-lite": {
@@ -360,19 +358,19 @@
"resolved": "https://registry.npmjs.org/inquirer/-/inquirer-5.1.0.tgz",
"integrity": "sha512-kn7N70US1MSZHZHSGJLiZ7iCwwncc7b0gc68YtlX29OjI3Mp0tSVV+snVXpZ1G+ONS3Ac9zd1m6hve2ibLDYfA==",
"requires": {
"ansi-escapes": "3.0.0",
"chalk": "2.3.1",
"cli-cursor": "2.1.0",
"cli-width": "2.2.0",
"external-editor": "2.1.0",
"figures": "2.0.0",
"lodash": "4.17.5",
"ansi-escapes": "^3.0.0",
"chalk": "^2.0.0",
"cli-cursor": "^2.1.0",
"cli-width": "^2.0.0",
"external-editor": "^2.1.0",
"figures": "^2.0.0",
"lodash": "^4.3.0",
"mute-stream": "0.0.7",
"run-async": "2.3.0",
"rxjs": "5.5.6",
"string-width": "2.1.1",
"strip-ansi": "4.0.0",
"through": "2.3.8"
"run-async": "^2.2.0",
"rxjs": "^5.5.2",
"string-width": "^2.1.0",
"strip-ansi": "^4.0.0",
"through": "^2.3.6"
}
},
"is-fullwidth-code-point": {
@@ -396,19 +394,18 @@
"integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo="
},
"js-yaml": {
"version": "3.10.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.10.0.tgz",
"integrity": "sha512-O2v52ffjLa9VeM43J4XocZE//WT9N0IiwDa3KSHH7Tu8CtH+1qM8SIZvnsTh6v+4yFy5KUY3BHUVwjpfAWsjIA==",
"version": "3.13.1",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz",
"integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==",
"requires": {
"argparse": "1.0.10",
"esprima": "4.0.0"
"argparse": "^1.0.7",
"esprima": "^4.0.0"
}
},
"jsbn": {
"version": "0.1.1",
"resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
"optional": true
"integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM="
},
"json-schema": {
"version": "0.2.3",
@@ -437,16 +434,16 @@
}
},
"lodash": {
"version": "4.17.5",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.5.tgz",
"integrity": "sha512-svL3uiZf1RwhH+cWrfZn3A4+U58wbP0tGVTLQPbjplZxZ8ROD9VLuNgsRniTlLe7OlSqR79RUehXgpBW/s0IQw=="
"version": "4.17.15",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
"integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
},
"log-symbols": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-2.2.0.tgz",
"integrity": "sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==",
"requires": {
"chalk": "2.3.1"
"chalk": "^2.0.1"
}
},
"mime-db": {
@@ -459,7 +456,7 @@
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.18.tgz",
"integrity": "sha512-lc/aahn+t4/SWV/qcmumYjymLsWfN3ELhpmVuUFjgsORruuZPVSwAQryq+HHGvO/SI2KVX26bx+En+zhM8g8hQ==",
"requires": {
"mime-db": "1.33.0"
"mime-db": "~1.33.0"
}
},
"mimic-fn": {
@@ -472,27 +469,20 @@
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "1.1.11"
"brace-expansion": "^1.1.7"
}
},
"minimist": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.0.tgz",
"integrity": "sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ="
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.5.tgz",
"integrity": "sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw=="
},
"mkdirp": {
"version": "0.5.1",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
"integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=",
"version": "0.5.5",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
"integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==",
"requires": {
"minimist": "0.0.8"
},
"dependencies": {
"minimist": {
"version": "0.0.8",
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0="
}
"minimist": "^1.2.5"
}
},
"mute-stream": {
@@ -510,7 +500,7 @@
"resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz",
"integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=",
"requires": {
"mimic-fn": "1.2.0"
"mimic-fn": "^1.0.0"
}
},
"ora": {
@@ -518,12 +508,12 @@
"resolved": "https://registry.npmjs.org/ora/-/ora-2.0.0.tgz",
"integrity": "sha512-g+IR0nMUXq1k4nE3gkENbN4wkF0XsVZFyxznTF6CdmwQ9qeTGONGpSR9LM5//1l0TVvJoJF3MkMtJp6slUsWFg==",
"requires": {
"chalk": "2.3.1",
"cli-cursor": "2.1.0",
"cli-spinners": "1.1.0",
"log-symbols": "2.2.0",
"strip-ansi": "4.0.0",
"wcwidth": "1.0.1"
"chalk": "^2.3.1",
"cli-cursor": "^2.1.0",
"cli-spinners": "^1.1.0",
"log-symbols": "^2.2.0",
"strip-ansi": "^4.0.0",
"wcwidth": "^1.0.1"
}
},
"os-homedir": {
@@ -546,11 +536,11 @@
"resolved": "https://registry.npmjs.org/preferences/-/preferences-1.0.2.tgz",
"integrity": "sha512-cRjA8Galk1HDDBOKjx6DhTwfy5+FVZtH7ogg6rgTLX8Ak4wi55RaS4uRztJuVPd+md1jZo99bH/h1Q9bQQK8bg==",
"requires": {
"graceful-fs": "4.1.11",
"js-yaml": "3.10.0",
"mkdirp": "0.5.1",
"os-homedir": "1.0.2",
"write-file-atomic": "1.3.4"
"graceful-fs": "^4.1.2",
"js-yaml": "^3.10.0",
"mkdirp": "^0.5.1",
"os-homedir": "^1.0.1",
"write-file-atomic": "^1.1.3"
}
},
"punycode": {
@@ -559,9 +549,9 @@
"integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4="
},
"qs": {
"version": "6.5.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz",
"integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A=="
"version": "6.9.4",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.9.4.tgz",
"integrity": "sha512-A1kFqHekCTM7cz0udomYUoYNWjBebHm/5wzU/XqrBRBNWectVH0QIiN+NEcZ0Dte5hvzHwbr8+XQmguPhJ6WdQ=="
},
"recursive-readdir": {
"version": "2.2.2",
@@ -576,28 +566,35 @@
"resolved": "https://registry.npmjs.org/request/-/request-2.83.0.tgz",
"integrity": "sha512-lR3gD69osqm6EYLk9wB/G1W/laGWjzH90t1vEa2xuxHD5KUrSzp9pUSfTm+YC5Nxt2T8nMPEvKlhbQayU7bgFw==",
"requires": {
"aws-sign2": "0.7.0",
"aws4": "1.6.0",
"caseless": "0.12.0",
"combined-stream": "1.0.6",
"extend": "3.0.1",
"forever-agent": "0.6.1",
"form-data": "2.3.2",
"har-validator": "5.0.3",
"hawk": "6.0.2",
"http-signature": "1.2.0",
"is-typedarray": "1.0.0",
"isstream": "0.1.2",
"json-stringify-safe": "5.0.1",
"mime-types": "2.1.18",
"oauth-sign": "0.8.2",
"performance-now": "2.1.0",
"qs": "6.5.1",
"safe-buffer": "5.1.1",
"stringstream": "0.0.5",
"tough-cookie": "2.3.4",
"tunnel-agent": "0.6.0",
"uuid": "3.2.1"
"aws-sign2": "~0.7.0",
"aws4": "^1.6.0",
"caseless": "~0.12.0",
"combined-stream": "~1.0.5",
"extend": "~3.0.1",
"forever-agent": "~0.6.1",
"form-data": "~2.3.1",
"har-validator": "~5.0.3",
"hawk": "~6.0.2",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.17",
"oauth-sign": "~0.8.2",
"performance-now": "^2.1.0",
"qs": "~6.5.1",
"safe-buffer": "^5.1.1",
"stringstream": "~0.0.5",
"tough-cookie": "~2.3.3",
"tunnel-agent": "^0.6.0",
"uuid": "^3.1.0"
},
"dependencies": {
"qs": {
"version": "6.5.2",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz",
"integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA=="
}
}
},
"restore-cursor": {
@@ -605,8 +602,8 @@
"resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz",
"integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=",
"requires": {
"onetime": "2.0.1",
"signal-exit": "3.0.2"
"onetime": "^2.0.0",
"signal-exit": "^3.0.2"
}
},
"run-async": {
@@ -614,7 +611,7 @@
"resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz",
"integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=",
"requires": {
"is-promise": "2.1.0"
"is-promise": "^2.1.0"
}
},
"rxjs": {
@@ -630,6 +627,11 @@
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz",
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
},
"safer-buffer": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"signal-exit": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.2.tgz",
@@ -645,7 +647,7 @@
"resolved": "https://registry.npmjs.org/sntp/-/sntp-2.1.0.tgz",
"integrity": "sha512-FL1b58BDrqS3A11lJ0zEdnJ3UOKqVxawAkF3k7F0CVN7VQ34aZrV+G8BZ1WC9ZL7NyrwsW0oviwsWDgRuVYtJg==",
"requires": {
"hoek": "4.2.1"
"hoek": "4.x.x"
}
},
"sprintf-js": {
@@ -654,18 +656,19 @@
"integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw="
},
"sshpk": {
"version": "1.13.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.13.1.tgz",
"integrity": "sha1-US322mKHFEMW3EwY/hzx2UBzm+M=",
"version": "1.16.1",
"resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz",
"integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==",
"requires": {
"asn1": "0.2.3",
"assert-plus": "1.0.0",
"bcrypt-pbkdf": "1.0.1",
"dashdash": "1.14.1",
"ecc-jsbn": "0.1.1",
"getpass": "0.1.7",
"jsbn": "0.1.1",
"tweetnacl": "0.14.5"
"asn1": "~0.2.3",
"assert-plus": "^1.0.0",
"bcrypt-pbkdf": "^1.0.0",
"dashdash": "^1.12.0",
"ecc-jsbn": "~0.1.1",
"getpass": "^0.1.1",
"jsbn": "~0.1.0",
"safer-buffer": "^2.0.2",
"tweetnacl": "~0.14.0"
}
},
"string-width": {
@@ -673,8 +676,8 @@
"resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
"integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
"requires": {
"is-fullwidth-code-point": "2.0.0",
"strip-ansi": "4.0.0"
"is-fullwidth-code-point": "^2.0.0",
"strip-ansi": "^4.0.0"
}
},
"stringstream": {
@@ -687,7 +690,7 @@
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
"integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
"requires": {
"ansi-regex": "3.0.0"
"ansi-regex": "^3.0.0"
}
},
"supports-color": {
@@ -695,7 +698,7 @@
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.2.0.tgz",
"integrity": "sha512-F39vS48la4YvTZUPVeTqsjsFNrvcMwrV3RLZINsmHo+7djCvuUzSIeXOnZ5hmjef4bajL1dNccN+tg5XAliO5Q==",
"requires": {
"has-flag": "3.0.0"
"has-flag": "^3.0.0"
}
},
"symbol-observable": {
@@ -713,7 +716,7 @@
"resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
"integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
"requires": {
"os-tmpdir": "1.0.2"
"os-tmpdir": "~1.0.2"
}
},
"tough-cookie": {
@@ -721,7 +724,7 @@
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz",
"integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==",
"requires": {
"punycode": "1.4.1"
"punycode": "^1.4.1"
}
},
"tunnel-agent": {
@@ -729,14 +732,13 @@
"resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz",
"integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=",
"requires": {
"safe-buffer": "5.1.1"
"safe-buffer": "^5.0.1"
}
},
"tweetnacl": {
"version": "0.14.5",
"resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz",
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=",
"optional": true
"integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q="
},
"underscore": {
"version": "1.8.3",
@@ -753,9 +755,9 @@
"resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz",
"integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=",
"requires": {
"assert-plus": "1.0.0",
"assert-plus": "^1.0.0",
"core-util-is": "1.0.2",
"extsprintf": "1.3.0"
"extsprintf": "^1.2.0"
}
},
"wcwidth": {
@@ -763,7 +765,7 @@
"resolved": "https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz",
"integrity": "sha1-8LDc+RW8X/FSivrbLA4XtTLaL+g=",
"requires": {
"defaults": "1.0.3"
"defaults": "^1.0.3"
}
},
"write-file-atomic": {
@@ -771,9 +773,9 @@
"resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-1.3.4.tgz",
"integrity": "sha1-+Aek8LHZ6ROuekgRLmzDrxmRtF8=",
"requires": {
"graceful-fs": "4.1.11",
"imurmurhash": "0.1.4",
"slide": "1.1.6"
"graceful-fs": "^4.1.11",
"imurmurhash": "^0.1.4",
"slide": "^1.1.5"
}
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "teedy-importer",
"version": "1.5.1",
"version": "1.9.0",
"description": "Import files to Teedy",
"bin": "main.js",
"scripts": {
@@ -11,6 +11,9 @@
"url": "git+https://github.com/sismics/docs.git"
},
"author": "Benjamin Gamard",
"contributors": [
"Cornelius Hoffmann <coding@hoffmn.de>"
],
"license": "GPL-2.0",
"bugs": {
"url": "https://github.com/sismics/docs/issues"
@@ -18,10 +21,12 @@
"homepage": "https://github.com/sismics/docs#readme",
"dependencies": {
"inquirer": "^5.1.0",
"minimist": "^1.2.0",
"minimist": "^1.2.5",
"ora": "^2.0.0",
"preferences": "^1.0.2",
"qs": "^6.9.4",
"recursive-readdir": "^2.2.2",
"minimatch": "^3.0.4",
"request": "^2.83.0",
"underscore": "^1.8.3"
}

docs-importer/pref Normal file
View File

@@ -0,0 +1,11 @@
importer:
daemon: true
path: import
tag: 'env1'
addtags: 'env2'
lang: 'env3'
baseUrl: 'env4'
username: 'env5'
password: 'env6'
copyFolder: 'env7'
fileFilter: 'env8'
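At container start, `env.sh` rewrites the `env1` to `env8` placeholders above with the values of the corresponding `TEEDY_*` environment variables before the importer reads this file.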

View File

@@ -1,81 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.8</version>
<relativePath>..</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>docs-stress</artifactId>
<packaging>jar</packaging>
<name>Docs Stress</name>
<dependencies>
<!-- Dependencies to Jersey -->
<dependency>
<groupId>org.glassfish.jersey.core</groupId>
<artifactId>jersey-client</artifactId>
</dependency>
<dependency>
<groupId>org.glassfish.jersey.media</groupId>
<artifactId>jersey-media-multipart</artifactId>
</dependency>
<!-- Depenedencies to Docs -->
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
</dependency>
<dependency>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-web-common</artifactId>
<type>test-jar</type>
</dependency>
<!-- Other external dependencies -->
<dependency>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
</dependency>
</dependencies>
<build>
<resources>
<resource>
<directory>src/main/resources</directory>
</resource>
</resources>
</build>
</project>

View File

@@ -1,135 +0,0 @@
package com.sismics.docs.stress;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.io.Resources;
import com.sismics.docs.rest.util.ClientUtil;
import com.sismics.util.filter.TokenBasedSecurityFilter;
import org.glassfish.jersey.client.ClientResponse;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.json.JsonObject;
import javax.ws.rs.client.*;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;
import java.io.InputStream;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ThreadLocalRandom;
/**
* Stress app for Teedy.
*
* @author bgamard
*/
public class Main {
/**
* Logger.
*/
private static final Logger log = LoggerFactory.getLogger(Main.class);
private static final String API_URL = "http://localhost:9999/docs-web/api/";
private static final int USER_COUNT = 50;
private static final int DOCUMENT_PER_USER_COUNT = 2000;
private static final int TAG_PER_USER_COUNT = 20;
private static final int FILE_PER_DOCUMENT_COUNT = 10;
private static Client client = ClientBuilder.newClient();
private static Set<User> userSet = Sets.newHashSet();
/**
* Entry point.
*
* @param args Args
* @throws Exception
*/
public static void main(String[] args) throws Exception {
log.info("Starting stress test...");
WebTarget resource = client.target(API_URL);
ClientUtil clientUtil = new ClientUtil(resource);
// Create users
for (int i = 0; i < USER_COUNT; i++) {
String username = generateString();
clientUtil.createUser(username);
userSet.add(new User(username, (clientUtil.login(username))));
log.info("Created user " + (i + 1) + "/" + USER_COUNT);
}
// Create tags for each user
int tagCreatedCount = 1;
for (User user : userSet) {
Invocation.Builder tagResource = resource.path("/tag").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, user.authToken);
for (int j = 0; j < TAG_PER_USER_COUNT; j++) {
Form form = new Form();
String name = generateString();
form.param("name", name);
form.param("color", "#ff0000");
JsonObject json = tagResource.put(Entity.form(form), JsonObject.class);
user.tagList.add(json.getString("id"));
log.info("Created tag " + (tagCreatedCount++) + "/" + TAG_PER_USER_COUNT * USER_COUNT);
}
}
// Create documents for each user
int documentCreatedCount = 1;
for (User user : userSet) {
for (int i = 0; i < DOCUMENT_PER_USER_COUNT; i++) {
long createDate = new Date().getTime();
Form form = new Form()
.param("title", generateString())
.param("description", generateString())
.param("tags", user.tagList.get(ThreadLocalRandom.current().nextInt(user.tagList.size()))) // Random tag
.param("language", "eng")
.param("create_date", Long.toString(createDate));
JsonObject json = resource.path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, user.authToken)
.put(Entity.form(form), JsonObject.class);
String documentId = json.getString("id");
log.info("Created document " + (documentCreatedCount++) + "/" + DOCUMENT_PER_USER_COUNT * USER_COUNT + " for user: " + user.username);
// Add files for each document
for (int j = 0; j < FILE_PER_DOCUMENT_COUNT; j++) {
try (InputStream is = Resources.getResource("empty.png").openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, "empty.png");
@SuppressWarnings("resource")
ClientResponse response = resource
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, user.authToken)
.put(Entity.entity(new FormDataMultiPart().field("id", documentId).bodyPart(streamDataBodyPart),
MediaType.MULTIPART_FORM_DATA_TYPE), ClientResponse.class);
Assert.assertEquals(Status.OK, Status.fromStatusCode(response.getStatus()));
}
}
}
}
}
private static String generateString() {
return UUID.randomUUID().toString().replace("-", "");
}
private static class User {
String username;
List<String> tagList = Lists.newArrayList();
String authToken;
User(String username, String authToken) {
this.username = username;
this.authToken = authToken;
}
}
}

Binary file not shown.

View File

@@ -1,6 +0,0 @@
log4j.rootCategory=WARN, CONSOLE
log4j.appender.CONSOLE=org.apache.log4j.ConsoleAppender
log4j.appender.CONSOLE.layout=org.apache.log4j.PatternLayout
log4j.appender.CONSOLE.layout.ConversionPattern=%d{DATE} %p %l %m %n
log4j.logger.com.sismics=DEBUG

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.8</version>
<version>1.11</version>
<relativePath>..</relativePath>
</parent>

View File

@@ -0,0 +1,40 @@
package com.sismics.rest.util;
import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.FileUtil;
import com.sismics.rest.exception.ServerException;
import com.sismics.util.JsonUtil;
import javax.json.Json;
import javax.json.JsonObjectBuilder;
import java.io.IOException;
import java.nio.file.Files;
/**
* Rest utilities.
*
* @author bgamard
*/
public class RestUtil {
/**
* Transform a File into its JSON representation
* @param fileDb a file
* @return the JSON
*/
public static JsonObjectBuilder fileToJsonObjectBuilder(File fileDb) {
try {
return Json.createObjectBuilder()
.add("id", fileDb.getId())
.add("processing", FileUtil.isProcessingFile(fileDb.getId()))
.add("name", JsonUtil.nullable(fileDb.getName()))
.add("version", fileDb.getVersion())
.add("mimetype", fileDb.getMimeType())
.add("document_id", JsonUtil.nullable(fileDb.getDocumentId()))
.add("create_date", fileDb.getCreateDate().getTime())
.add("size", Files.size(DirectoryUtil.getStorageDirectory().resolve(fileDb.getId())));
} catch (IOException e) {
throw new ServerException("FileError", "Unable to get the size of " + fileDb.getId(), e);
}
}
}
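For illustration, the JSON produced for a hypothetical file would look roughly like `{"id": "4c2f…", "processing": false, "name": "invoice.pdf", "version": 0, "mimetype": "application/pdf", "document_id": "7a1b…", "create_date": 1647093600000, "size": 12345}` (all values here are made up).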

View File

@@ -21,6 +21,8 @@ public class ValidationUtil {
private static Pattern ALPHANUMERIC_PATTERN = Pattern.compile("[a-zA-Z0-9_]+");
private static Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@\\.]+");
/**
* Checks that the argument is not null.
*
@@ -152,6 +154,12 @@ public class ValidationUtil {
}
}
public static void validateUsername(String s, String name) throws ClientException {
if (!USERNAME_PATTERN.matcher(s).matches()) {
throw new ClientException("ValidationError", MessageFormat.format("{0} must have only alphanumeric, underscore characters or @ and .", name));
}
}
public static void validateRegex(String s, String name, String regex) throws ClientException {
if (!Pattern.compile(regex).matcher(s).matches()) {
throw new ClientException("ValidationError", MessageFormat.format("{0} must match {1}", name, regex));
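A quick standalone sketch (not part of the change set) of how the relaxed `USERNAME_PATTERN` above behaves, using the same regular expression:

```java
import java.util.regex.Pattern;

// Standalone illustration of the username pattern now used by ValidationUtil.
public class UsernamePatternSketch {
    private static final Pattern USERNAME_PATTERN = Pattern.compile("[a-zA-Z0-9_@\\.]+");

    public static void main(String[] args) {
        // Dots and @ are accepted, so email-style usernames pass.
        System.out.println(USERNAME_PATTERN.matcher("john.doe@example.com").matches()); // true
        System.out.println(USERNAME_PATTERN.matcher("jane_doe").matches());             // true
        // Whitespace is still rejected.
        System.out.println(USERNAME_PATTERN.matcher("john doe").matches());             // false
    }
}
```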

View File

@@ -26,6 +26,7 @@ import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import java.util.Objects;
/**
* Base class of integration tests with Jersey.
@@ -33,6 +34,16 @@ import java.util.List;
* @author jtremeaux
*/
public abstract class BaseJerseyTest extends JerseyTest {
protected static final String FILE_APACHE_PPTX = "file/apache.pptx";
protected static final String FILE_DOCUMENT_DOCX = "file/document.docx";
protected static final String FILE_DOCUMENT_ODT = "file/document.odt";
protected static final String FILE_DOCUMENT_TXT = "file/document.txt";
protected static final String FILE_EINSTEIN_ROOSEVELT_LETTER_PNG = "file/Einstein-Roosevelt-letter.png";
protected static final String FILE_PIA_00452_JPG = "file/PIA00452.jpg";
protected static final String FILE_VIDEO_WEBM = "file/video.webm";
protected static final String FILE_WIKIPEDIA_PDF = "file/wikipedia.pdf";
protected static final String FILE_WIKIPEDIA_ZIP = "file/wikipedia.zip";
/**
* Test HTTP server.
*/
@@ -56,7 +67,7 @@ public abstract class BaseJerseyTest extends JerseyTest {
@Override
protected Application configure() {
String travisEnv = System.getenv("TRAVIS");
if (travisEnv == null || !travisEnv.equals("true")) {
if (!Objects.equals(travisEnv, "true")) {
// Travis doesn't like big logs
enable(TestProperties.LOG_TRAFFIC);
enable(TestProperties.DUMP_ENTITY);

View File

@@ -3,8 +3,10 @@ package com.sismics.docs.rest.util;
import com.google.common.io.Resources;
import com.sismics.util.filter.TokenBasedSecurityFilter;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.glassfish.jersey.media.multipart.MultiPart;
import org.glassfish.jersey.media.multipart.MultiPartFeature;
import org.glassfish.jersey.media.multipart.file.StreamDataBodyPart;
import org.junit.Assert;
import javax.json.JsonObject;
import javax.ws.rs.client.Entity;
@@ -15,6 +17,12 @@ import javax.ws.rs.core.NewCookie;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
/**
* REST client utilities.
@@ -113,7 +121,8 @@ public class ClientUtil {
.param("username", username)
.param("password", password)
.param("remember", remember.toString())));
Assert.assertEquals(200, response.getStatus());
return getAuthenticationCookie(response);
}
@@ -154,27 +163,58 @@ public class ClientUtil {
return authToken;
}
/**
* Create a document
*
* @param token Authentication token
* @return Document ID
*/
public String createDocument(String token) {
JsonObject json = this.resource.path("/document").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
.put(Entity.form(new Form()
.param("title", "Document Title")
.param("description", "Document description")
.param("language", "eng")
.param("create_date", Long.toString(new Date().getTime()))), JsonObject.class);
String documentId = json.getString("id");
Assert.assertNotNull(documentId);
return documentId;
}
/**
* Add a file to a document.
*
* @param file File path
* @param filename Filename
* @param token Authentication token
* @param documentId Document ID
* @return File ID
* @throws IOException e
* @throws URISyntaxException e
*/
public String addFileToDocument(String file, String filename, String token, String documentId) throws IOException {
try (InputStream is = Resources.getResource(file).openStream()) {
public String addFileToDocument(String file, String token, String documentId) throws IOException, URISyntaxException {
URL fileResource = Resources.getResource(file);
Path filePath = Paths.get(fileResource.toURI());
String filename = filePath.getFileName().toString();
try (InputStream is = fileResource.openStream()) {
StreamDataBodyPart streamDataBodyPart = new StreamDataBodyPart("file", is, filename);
try (FormDataMultiPart multiPart = new FormDataMultiPart()) {
JsonObject json = resource
MultiPart formContent;
if (documentId != null) {
formContent = multiPart.field("id", documentId).bodyPart(streamDataBodyPart);
} else {
formContent = multiPart.bodyPart(streamDataBodyPart);
}
JsonObject json = this.resource
.register(MultiPartFeature.class)
.path("/file").request()
.cookie(TokenBasedSecurityFilter.COOKIE_NAME, token)
.put(Entity.entity(multiPart.field("id", documentId).bodyPart(streamDataBodyPart),
.put(Entity.entity(formContent,
MediaType.MULTIPART_FORM_DATA_TYPE), JsonObject.class);
return json.getString("id");
String fileId = json.getString("id");
Assert.assertNotNull(fileId);
Assert.assertEquals(Files.size(filePath), json.getJsonNumber("size").longValue());
return fileId;
}
}
}

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>com.sismics.docs</groupId>
<artifactId>docs-parent</artifactId>
<version>1.8</version>
<version>1.11</version>
<relativePath>..</relativePath>
</parent>
@@ -138,7 +138,7 @@
<artifactId>greenmail</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
<build>
@@ -209,54 +209,6 @@
</build>
</profile>
<!-- Stress profile -->
<profile>
<id>stress</id>
<activation>
<property>
<name>env</name>
<value>stress</value>
</property>
</activation>
<build>
<resources>
<resource>
<directory>src/stress/resources</directory>
<filtering>false</filtering>
<excludes>
<exclude>**/config.properties</exclude>
</excludes>
</resource>
<resource>
<directory>src/stress/resources</directory>
<filtering>true</filtering>
<includes>
<include>**/config.properties</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-maven-plugin</artifactId>
<configuration>
<systemProperties>
<systemProperty>
<name>application.mode</name>
<value>dev</value>
</systemProperty>
</systemProperties>
<webApp>
<contextPath>/docs-web</contextPath>
</webApp>
</configuration>
</plugin>
</plugins>
</build>
</profile>
<!-- Production profile -->
<profile>
<id>prod</id>

View File

@@ -1,3 +1,3 @@
api.current_version=${project.version}
api.min_version=1.0
db.version=24
db.version=27

View File

@@ -7,4 +7,5 @@ log4j.appender.MEMORY.size=1000
log4j.logger.com.sismics=DEBUG
log4j.logger.org.apache.pdfbox=ERROR
log4j.logger.org.glassfish.jersey.servlet.WebComponent=ERROR
log4j.logger.org.glassfish.jersey.servlet.WebComponent=ERROR
log4j.logger.org.apache.directory=ERROR

View File

@@ -14,6 +14,7 @@ import com.sismics.docs.core.model.jpa.File;
import com.sismics.docs.core.service.InboxService;
import com.sismics.docs.core.util.ConfigUtil;
import com.sismics.docs.core.util.DirectoryUtil;
import com.sismics.docs.core.util.authentication.LdapAuthenticationHandler;
import com.sismics.docs.core.util.jpa.PaginatedList;
import com.sismics.docs.core.util.jpa.PaginatedLists;
import com.sismics.docs.rest.constant.BaseFunction;
@@ -204,28 +205,28 @@ public class AppResource extends BaseResource {
Config passwordConfig = configDao.getById(ConfigType.SMTP_PASSWORD);
Config fromConfig = configDao.getById(ConfigType.SMTP_FROM);
JsonObjectBuilder response = Json.createObjectBuilder();
if (System.getenv(Constants.SMTP_HOSTNAME_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_HOSTNAME_ENV))) {
if (hostnameConfig == null) {
response.addNull("hostname");
} else {
response.add("hostname", hostnameConfig.getValue());
}
}
if (System.getenv(Constants.SMTP_PORT_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_PORT_ENV))) {
if (portConfig == null) {
response.addNull("port");
} else {
response.add("port", Integer.valueOf(portConfig.getValue()));
}
}
if (System.getenv(Constants.SMTP_USERNAME_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_USERNAME_ENV))) {
if (usernameConfig == null) {
response.addNull("username");
} else {
response.add("username", usernameConfig.getValue());
}
}
if (System.getenv(Constants.SMTP_PASSWORD_ENV) == null) {
if (Strings.isNullOrEmpty(System.getenv(Constants.SMTP_PASSWORD_ENV))) {
if (passwordConfig == null) {
response.addNull("password");
} else {
@@ -311,6 +312,7 @@ public class AppResource extends BaseResource {
* @apiSuccess {String} port IMAP port
* @apiSuccess {String} username IMAP username
* @apiSuccess {String} password IMAP password
* @apiSuccess {String} folder IMAP folder
* @apiSuccess {String} tag Tag for created documents
* @apiError (client) ForbiddenError Access denied
* @apiPermission admin
@@ -328,14 +330,19 @@ public class AppResource extends BaseResource {
ConfigDao configDao = new ConfigDao();
Boolean enabled = ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_ENABLED);
Boolean autoTags = ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_AUTOMATIC_TAGS);
Boolean deleteImported = ConfigUtil.getConfigBooleanValue(ConfigType.INBOX_DELETE_IMPORTED);
Config hostnameConfig = configDao.getById(ConfigType.INBOX_HOSTNAME);
Config portConfig = configDao.getById(ConfigType.INBOX_PORT);
Config usernameConfig = configDao.getById(ConfigType.INBOX_USERNAME);
Config passwordConfig = configDao.getById(ConfigType.INBOX_PASSWORD);
Config folderConfig = configDao.getById(ConfigType.INBOX_FOLDER);
Config tagConfig = configDao.getById(ConfigType.INBOX_TAG);
JsonObjectBuilder response = Json.createObjectBuilder();
response.add("enabled", enabled);
response.add("autoTagsEnabled", autoTags);
response.add("deleteImported", deleteImported);
if (hostnameConfig == null) {
response.addNull("hostname");
} else {
@@ -356,6 +363,11 @@ public class AppResource extends BaseResource {
} else {
response.add("password", passwordConfig.getValue());
}
if (folderConfig == null) {
response.addNull("folder");
} else {
response.add("folder", folderConfig.getValue());
}
if (tagConfig == null) {
response.addNull("tag");
} else {
@@ -384,10 +396,13 @@ public class AppResource extends BaseResource {
* @apiName PostAppConfigInbox
* @apiGroup App
* @apiParam {Boolean} enabled True if the inbox scanning is enabled
* @apiParam {Boolean} autoTagsEnabled If true automatically add tags to document (prefixed by #)
* @apiParam {Boolean} deleteImported If true delete message from mailbox after import
* @apiParam {String} hostname IMAP hostname
* @apiParam {Integer} port IMAP port
* @apiParam {String} username IMAP username
* @apiParam {String} password IMAP password
* @apiParam {String} folder IMAP folder
* @apiParam {String} tag Tag for created documents
* @apiError (client) ForbiddenError Access denied
* @apiError (client) ValidationError Validation error
@@ -399,22 +414,28 @@ public class AppResource extends BaseResource {
* @param portStr IMAP port
* @param username IMAP username
* @param password IMAP password
* @param folder IMAP folder
* @param tag Tag for created documents
* @return Response
*/
@POST
@Path("config_inbox")
public Response configInbox(@FormParam("enabled") Boolean enabled,
@FormParam("autoTagsEnabled") Boolean autoTagsEnabled,
@FormParam("deleteImported") Boolean deleteImported,
@FormParam("hostname") String hostname,
@FormParam("port") String portStr,
@FormParam("username") String username,
@FormParam("password") String password,
@FormParam("folder") String folder,
@FormParam("tag") String tag) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
ValidationUtil.validateRequired(enabled, "enabled");
ValidationUtil.validateRequired(autoTagsEnabled, "autoTagsEnabled");
ValidationUtil.validateRequired(deleteImported, "deleteImported");
if (!Strings.isNullOrEmpty(portStr)) {
ValidationUtil.validateInteger(portStr, "port");
}
@@ -422,6 +443,8 @@ public class AppResource extends BaseResource {
// Just update the changed configuration
ConfigDao configDao = new ConfigDao();
configDao.update(ConfigType.INBOX_ENABLED, enabled.toString());
configDao.update(ConfigType.INBOX_AUTOMATIC_TAGS, autoTagsEnabled.toString());
configDao.update(ConfigType.INBOX_DELETE_IMPORTED, deleteImported.toString());
if (!Strings.isNullOrEmpty(hostname)) {
configDao.update(ConfigType.INBOX_HOSTNAME, hostname);
}
@@ -434,6 +457,9 @@ public class AppResource extends BaseResource {
if (!Strings.isNullOrEmpty(password)) {
configDao.update(ConfigType.INBOX_PASSWORD, password);
}
if (!Strings.isNullOrEmpty(folder)) {
configDao.update(ConfigType.INBOX_FOLDER, folder);
}
if (!Strings.isNullOrEmpty(tag)) {
configDao.update(ConfigType.INBOX_TAG, tag);
}
@@ -486,7 +512,7 @@ public class AppResource extends BaseResource {
* @apiSuccess {String} logs.message Message
* @apiError (client) ForbiddenError Access denied
* @apiError (server) ServerError MEMORY appender not configured
* @apiPermission user
* @apiPermission admin
* @apiVersion 1.5.0
*
* @param minLevel Filter on logging level
@@ -507,6 +533,7 @@ public class AppResource extends BaseResource {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
// Get the memory appender
org.apache.log4j.Logger logger = org.apache.log4j.Logger.getRootLogger();
@@ -643,49 +670,183 @@ public class AppResource extends BaseResource {
log.info("Deleting {} orphan ACLs", q.executeUpdate());
// Soft delete orphan comments
q = em.createNativeQuery("update T_COMMENT c set c.COM_DELETEDATE_D = :dateNow where c.COM_ID_C in (select c.COM_ID_C from T_COMMENT c left join T_DOCUMENT d on d.DOC_ID_C = c.COM_IDDOC_C and d.DOC_DELETEDATE_D is null where d.DOC_ID_C is null)");
q = em.createNativeQuery("update T_COMMENT set COM_DELETEDATE_D = :dateNow where COM_ID_C in (select c.COM_ID_C from T_COMMENT c left join T_DOCUMENT d on d.DOC_ID_C = c.COM_IDDOC_C and d.DOC_DELETEDATE_D is null where d.DOC_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan comments", q.executeUpdate());
// Soft delete orphan document tag links
q = em.createNativeQuery("update T_DOCUMENT_TAG dt set dt.DOT_DELETEDATE_D = :dateNow where dt.DOT_ID_C in (select dt.DOT_ID_C from T_DOCUMENT_TAG dt left join T_DOCUMENT d on dt.DOT_IDDOCUMENT_C = d.DOC_ID_C and d.DOC_DELETEDATE_D is null left join T_TAG t on t.TAG_ID_C = dt.DOT_IDTAG_C and t.TAG_DELETEDATE_D is null where d.DOC_ID_C is null or t.TAG_ID_C is null)");
q = em.createNativeQuery("update T_DOCUMENT_TAG set DOT_DELETEDATE_D = :dateNow where DOT_ID_C in (select dt.DOT_ID_C from T_DOCUMENT_TAG dt left join T_DOCUMENT d on dt.DOT_IDDOCUMENT_C = d.DOC_ID_C and d.DOC_DELETEDATE_D is null left join T_TAG t on t.TAG_ID_C = dt.DOT_IDTAG_C and t.TAG_DELETEDATE_D is null where d.DOC_ID_C is null or t.TAG_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan document tag links", q.executeUpdate());
// Soft delete orphan shares
q = em.createNativeQuery("update T_SHARE s set s.SHA_DELETEDATE_D = :dateNow where s.SHA_ID_C in (select s.SHA_ID_C from T_SHARE s left join T_ACL a on a.ACL_TARGETID_C = s.SHA_ID_C and a.ACL_DELETEDATE_D is null where a.ACL_ID_C is null)");
q = em.createNativeQuery("update T_SHARE set SHA_DELETEDATE_D = :dateNow where SHA_ID_C in (select s.SHA_ID_C from T_SHARE s left join T_ACL a on a.ACL_TARGETID_C = s.SHA_ID_C and a.ACL_DELETEDATE_D is null where a.ACL_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan shares", q.executeUpdate());
// Soft delete orphan tags
q = em.createNativeQuery("update T_TAG t set t.TAG_DELETEDATE_D = :dateNow where t.TAG_ID_C in (select t.TAG_ID_C from T_TAG t left join T_USER u on u.USE_ID_C = t.TAG_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q = em.createNativeQuery("update T_TAG set TAG_DELETEDATE_D = :dateNow where TAG_ID_C in (select t.TAG_ID_C from T_TAG t left join T_USER u on u.USE_ID_C = t.TAG_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan tags", q.executeUpdate());
// Soft delete orphan documents
q = em.createNativeQuery("update T_DOCUMENT d set d.DOC_DELETEDATE_D = :dateNow where d.DOC_ID_C in (select d.DOC_ID_C from T_DOCUMENT d left join T_USER u on u.USE_ID_C = d.DOC_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q = em.createNativeQuery("update T_DOCUMENT set DOC_DELETEDATE_D = :dateNow where DOC_ID_C in (select d.DOC_ID_C from T_DOCUMENT d left join T_USER u on u.USE_ID_C = d.DOC_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan documents", q.executeUpdate());
// Soft delete orphan files
q = em.createNativeQuery("update T_FILE f set f.FIL_DELETEDATE_D = :dateNow where f.FIL_ID_C in (select f.FIL_ID_C from T_FILE f left join T_USER u on u.USE_ID_C = f.FIL_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q = em.createNativeQuery("update T_FILE set FIL_DELETEDATE_D = :dateNow where FIL_ID_C in (select f.FIL_ID_C from T_FILE f left join T_USER u on u.USE_ID_C = f.FIL_IDUSER_C and u.USE_DELETEDATE_D is null where u.USE_ID_C is null)");
q.setParameter("dateNow", new Date());
log.info("Deleting {} orphan files", q.executeUpdate());
// Hard delete softly deleted data
log.info("Deleting {} soft deleted document tag links", em.createQuery("delete DocumentTag dt where dt.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted ACLs", em.createQuery("delete Acl a where a.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted shares", em.createQuery("delete Share s where s.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted tags", em.createQuery("delete Tag t where t.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted comments", em.createQuery("delete Comment c where c.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted files", em.createQuery("delete File f where f.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted documents", em.createQuery("delete Document d where d.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted users", em.createQuery("delete User u where u.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted groups", em.createQuery("delete Group g where g.deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted document tag links", em.createQuery("delete DocumentTag where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted ACLs", em.createQuery("delete Acl where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted shares", em.createQuery("delete Share where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted tags", em.createQuery("delete Tag where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted comments", em.createQuery("delete Comment where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted files", em.createQuery("delete File where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted documents", em.createQuery("delete Document where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted users", em.createQuery("delete User where deleteDate is not null").executeUpdate());
log.info("Deleting {} soft deleted groups", em.createQuery("delete Group where deleteDate is not null").executeUpdate());
// Always return OK
JsonObjectBuilder response = Json.createObjectBuilder()
.add("status", "ok");
return Response.ok().entity(response.build()).build();
}
/**
* Get the LDAP authentication configuration.
*
* @api {get} /app/config_ldap Get the LDAP authentication configuration
* @apiName GetAppConfigLdap
* @apiGroup App
* @apiSuccess {Boolean} enabled LDAP authentication enabled
* @apiSuccess {String} host LDAP server host
* @apiSuccess {Integer} port LDAP server port
* @apiSuccess {String} admin_dn Admin DN
* @apiSuccess {String} admin_password Admin password
* @apiSuccess {String} base_dn Base DN
* @apiSuccess {String} filter LDAP filter
* @apiSuccess {String} default_email LDAP default email
* @apiSuccess {Integer} default_storage LDAP default storage
* @apiError (client) ForbiddenError Access denied
* @apiPermission admin
* @apiVersion 1.9.0
*
* @return Response
*/
@GET
@Path("config_ldap")
public Response getConfigLdap() {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
ConfigDao configDao = new ConfigDao();
Config enabled = configDao.getById(ConfigType.LDAP_ENABLED);
JsonObjectBuilder response = Json.createObjectBuilder();
if (enabled != null && Boolean.parseBoolean(enabled.getValue())) {
// LDAP enabled
response.add("enabled", true)
.add("host", ConfigUtil.getConfigStringValue(ConfigType.LDAP_HOST))
.add("port", ConfigUtil.getConfigIntegerValue(ConfigType.LDAP_PORT))
.add("admin_dn", ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_DN))
.add("admin_password", ConfigUtil.getConfigStringValue(ConfigType.LDAP_ADMIN_PASSWORD))
.add("base_dn", ConfigUtil.getConfigStringValue(ConfigType.LDAP_BASE_DN))
.add("filter", ConfigUtil.getConfigStringValue(ConfigType.LDAP_FILTER))
.add("default_email", ConfigUtil.getConfigStringValue(ConfigType.LDAP_DEFAULT_EMAIL))
.add("default_storage", ConfigUtil.getConfigLongValue(ConfigType.LDAP_DEFAULT_STORAGE));
} else {
// LDAP disabled
response.add("enabled", false);
}
return Response.ok().entity(response.build()).build();
}
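For illustration only, a minimal client-side sketch (not part of this diff) of reading this configuration through the standard JAX-RS client API; the base URI, the auth_token cookie name and the token value are assumptions, and a JSON-P capable client provider is assumed to be on the classpath.
// Hypothetical usage sketch, not part of this diff: read the LDAP configuration as an admin.
public class ConfigLdapReadSketch {
    public static void main(String[] args) {
        String adminToken = "admin-session-token";            // assumed: value of an admin auth cookie
        javax.ws.rs.client.Client client = javax.ws.rs.client.ClientBuilder.newClient();
        javax.json.JsonObject ldapConfig = client
                .target("http://localhost:8080/docs-web/api") // assumed base URI
                .path("app/config_ldap")
                .request()
                .cookie("auth_token", adminToken)             // assumed cookie name
                .get(javax.json.JsonObject.class);            // needs a JSON-P provider on the client classpath
        if (ldapConfig.getBoolean("enabled")) {
            System.out.println("LDAP host: " + ldapConfig.getString("host"));
        }
    }
}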
/**
* Configure the LDAP authentication.
*
* @api {post} /app/config_ldap Configure the LDAP authentication
* @apiName PostAppConfigLdap
* @apiGroup App
* @apiParam {Boolean} enabled LDAP authentication enabled
* @apiParam {String} host LDAP server host
* @apiParam {Integer} port LDAP server port
* @apiParam {String} admin_dn Admin DN
* @apiParam {String} admin_password Admin password
* @apiParam {String} base_dn Base DN
* @apiParam {String} filter LDAP filter
* @apiParam {String} default_email LDAP default email
* @apiParam {Integer} default_storage LDAP default storage
* @apiError (client) ForbiddenError Access denied
* @apiError (client) ValidationError Validation error
* @apiPermission admin
* @apiVersion 1.9.0
*
* @param enabled LDAP authentication enabled
* @param host LDAP server host
* @param portStr LDAP server port
* @param adminDn Admin DN
* @param adminPassword Admin password
* @param baseDn Base DN
* @param filter LDAP filter
* @param defaultEmail LDAP default email
* @param defaultStorageStr LDAP default storage
* @return Response
*/
@POST
@Path("config_ldap")
public Response configLdap(@FormParam("enabled") Boolean enabled,
@FormParam("host") String host,
@FormParam("port") String portStr,
@FormParam("admin_dn") String adminDn,
@FormParam("admin_password") String adminPassword,
@FormParam("base_dn") String baseDn,
@FormParam("filter") String filter,
@FormParam("default_email") String defaultEmail,
@FormParam("default_storage") String defaultStorageStr) {
if (!authenticate()) {
throw new ForbiddenClientException();
}
checkBaseFunction(BaseFunction.ADMIN);
ConfigDao configDao = new ConfigDao();
if (enabled != null && enabled) {
// LDAP enabled, validate everything
ValidationUtil.validateLength(host, "host", 1, 250);
ValidationUtil.validateInteger(portStr, "port");
ValidationUtil.validateLength(adminDn, "admin_dn", 1, 250);
ValidationUtil.validateLength(adminPassword, "admin_password", 1, 250);
ValidationUtil.validateLength(baseDn, "base_dn", 1, 250);
ValidationUtil.validateLength(filter, "filter", 1, 250);
if (!filter.contains("USERNAME")) {
throw new ClientException("ValidationError", "'filter' must contain 'USERNAME'");
}
ValidationUtil.validateLength(defaultEmail, "default_email", 1, 250);
ValidationUtil.validateLong(defaultStorageStr, "default_storage");
configDao.update(ConfigType.LDAP_ENABLED, Boolean.TRUE.toString());
configDao.update(ConfigType.LDAP_HOST, host);
configDao.update(ConfigType.LDAP_PORT, portStr);
configDao.update(ConfigType.LDAP_ADMIN_DN, adminDn);
configDao.update(ConfigType.LDAP_ADMIN_PASSWORD, adminPassword);
configDao.update(ConfigType.LDAP_BASE_DN, baseDn);
configDao.update(ConfigType.LDAP_FILTER, filter);
configDao.update(ConfigType.LDAP_DEFAULT_EMAIL, defaultEmail);
configDao.update(ConfigType.LDAP_DEFAULT_STORAGE, defaultStorageStr);
} else {
// LDAP disabled
configDao.update(ConfigType.LDAP_ENABLED, Boolean.FALSE.toString());
}
// Reset the LDAP pool to reconnect with the new configuration
LdapAuthenticationHandler.reset();
return Response.ok().build();
}
}
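Likewise, a hedged sketch of enabling LDAP through the POST /app/config_ldap route above from a JAX-RS client; every value, the base URI and the auth_token cookie name are illustrative assumptions, and the filter deliberately contains the USERNAME placeholder required by the validation in configLdap().
// Hypothetical usage sketch, not part of this diff: enable LDAP with form-encoded parameters.
public class ConfigLdapWriteSketch {
    public static void main(String[] args) {
        String adminToken = "admin-session-token";            // assumed: value of an admin auth cookie
        javax.ws.rs.core.Form form = new javax.ws.rs.core.Form()
                .param("enabled", "true")
                .param("host", "ldap.example.com")            // example values only
                .param("port", "389")
                .param("admin_dn", "cn=admin,dc=example,dc=com")
                .param("admin_password", "secret")
                .param("base_dn", "ou=people,dc=example,dc=com")
                .param("filter", "(uid=USERNAME)")            // must contain the USERNAME placeholder
                .param("default_email", "user@example.com")
                .param("default_storage", "100000000");
        javax.ws.rs.core.Response response = javax.ws.rs.client.ClientBuilder.newClient()
                .target("http://localhost:8080/docs-web/api") // assumed base URI
                .path("app/config_ldap")
                .request()
                .cookie("auth_token", adminToken)             // assumed cookie name
                .post(javax.ws.rs.client.Entity.form(form));
        System.out.println("Status: " + response.getStatus());
    }
}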


@@ -8,8 +8,11 @@ import com.sismics.security.UserPrincipal;
import com.sismics.util.filter.SecurityFilter;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import java.security.Principal;
import java.util.List;
import java.util.Set;
@@ -19,6 +22,8 @@ import java.util.Set;
*
* @author jtremeaux
*/
@Consumes(MediaType.APPLICATION_FORM_URLENCODED)
@Produces(MediaType.APPLICATION_JSON)
public abstract class BaseResource {
/**
* @apiDefine admin Admin


@@ -0,0 +1,34 @@
package com.sismics.docs.rest.resource;
import org.glassfish.jersey.message.internal.ReaderWriter;
import javax.json.JsonObject;
import javax.ws.rs.Produces;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
/**
* When a JSON-based exception is thrown but a JSON response is not expected,
* the media type of the response is set to plain text.
*/
@Provider
@Produces(MediaType.APPLICATION_OCTET_STREAM)
public class DocsMessageBodyWriter implements MessageBodyWriter<JsonObject> {
@Override
public boolean isWriteable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
return true;
}
@Override
public void writeTo(JsonObject o, Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream entityStream) throws IOException, WebApplicationException {
ReaderWriter.writeToAsString(o.toString(), entityStream, MediaType.TEXT_PLAIN_TYPE);
}
}
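Because it is annotated with @Provider, this writer is discovered automatically when Jersey scans the resource package; the explicit ResourceConfig registration below is only an illustrative sketch, not code from this diff.
// Hypothetical registration sketch, not part of this diff.
org.glassfish.jersey.server.ResourceConfig resourceConfig = new org.glassfish.jersey.server.ResourceConfig()
        .packages("com.sismics.docs.rest.resource")   // package scanning also picks up @Provider classes
        .register(DocsMessageBodyWriter.class);       // explicit registration shown for clarity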

Some files were not shown because too many files have changed in this diff.