Commit 1afa46ca authored by aleksandr-wemakesoftware's avatar aleksandr-wemakesoftware Committed by GitHub

Merge pull request #12 from e-gov/develop

RIHA 7.4 release preparation
parents 97d3f30e d3976ab7
Märkus. Ümber nimetatud repo `kirjeldusmoodul-rest-api` failist README.md - Priit P 27.04.2017
# Arendusjuhend
## Eeldused
- PostgreSQL 9.6 andmebaas
- Tomcat 8
- Ubuntu 16.04
- Ubuntu 18.04
- OpenJDK 1.8
- Maven (testitud 3.3.9 peal)
## Andmebaasi paigaldamine
1. Paigaldada PostgreSQL versioonil 9.6 töötav andmebaas
2. Tekitada andmebaasi vajalikud tabelid, kasutades RIHA-Storage komponendi [dbcreate.sh](https://github.com/e-gov/RIHA-Storage/blob/master/dbcreate.sh) skripti:
Andmebaasitabelite tekitamiseks tuleb anda selle repositooriumi juurkataloogist käsk:
```bash
PGPASSWORD={password} sh ./dbcreate.sh [-n] {hostname} {dbport} {dbname} {username}
```
Käsus tuleb näidata järgmine info:
- **-n** – Andmebaas tekitatakse ilma vana RIHA andmebaasiga sidumata (st tegemist on vana RIHA lahendusest sõltumatu paigaldusega)
- **hostname** - Andmebaasiserveri IP aadress või DNS nimi
- **dbport** - Andmebaasiserveri poolt kasutatav port
- **dbname** - andmebaasitabeleid sisaldava andmebaasi nimi.
- **username** - andmebaasi administreerimise õiguseid omava andmebaasikasutaja nimi
- **password** - Andmebaasikasutaja parool
2. Andmebaasi vajalikud tabelid tekitatakse Liquibase'iga, mis käivitatakse rakendust käivitades _runtime_ ajal. _Runtime_ jooksul peab Liquibase saama ligi paigaldatud andmebaasile. Paigaldatud andmebaasi konfiguratsioon tuleb lisada siia: [riharest.project.properties konfiguratsioonifaili](https://github.com/e-gov/RIHA-Storage/blob/develop/src/main/resources/riharest.project.properties)
3. Andmebaasiühenduse konfigureeritavate parameetrite kohta leiab infot peatükist ["Paigalduse häälestamine"](#konfiguratsioon)
4. Andmebaasis peab leiduma skeem nimega **_riha_** ning see peab olema antud kasutaja
vaikimisi skeemiks. Vajalikul kujul andmebaas ja kasutajatunnus tuleb tekitada eraldi käsitsi
andmebaasihalduse tarkvara abil. Muud täiendavad nõuded tekitatud andmebaasile puuduvad.
## Andmebaasi uuendamine
## Lahenduse kompileerimine
Tarkvara kompileerimine ning WAR paketi tegemine:
......@@ -39,6 +29,7 @@ mvn package
Kompileeritud WAR paketi leiab `target/` kataloogist.
<a name="konfiguratsioon"></a>
## Paigalduse häälestamine
### Kompileerimise käigus
......
......@@ -63,7 +63,7 @@
<profiles>
<profile>
<id>${project.groupId}-${project.artifactId}}</id>
<id>${project.groupId}-${project.artifactId}</id>
<activation>
<activeByDefault>true</activeByDefault>
......@@ -85,7 +85,7 @@
<riharest.authService>http://localhost:8080/riha/sessionManagementServlet</riharest.authService>
<!-- Note the trailing slash! -->
<riharest.pathRootWindows>C:\\Users\\Praktikant\\test_folder\\</riharest.pathRootWindows>
<riharest.pathRootWindows>C:\\test_folder\\</riharest.pathRootWindows>
</properties>
</profile>
......@@ -244,6 +244,14 @@
<scope>test</scope>
</dependency>
<!-- embedded postgres -->
<dependency>
<groupId>ru.yandex.qatools.embed</groupId>
<artifactId>postgresql-embedded</artifactId>
<version>2.10</version>
<scope>test</scope>
</dependency>
<!-- other -->
<dependency>
<groupId>org.apache.commons</groupId>
......@@ -260,6 +268,11 @@
<artifactId>commons-io</artifactId>
<version>2.6</version>
</dependency>
<dependency>
<groupId>org.apache.poi</groupId>
<artifactId>poi-ooxml</artifactId>
<version>4.0.1</version>
</dependency>
</dependencies>
......@@ -423,16 +436,40 @@
</pluginManagement>
<plugins>
<plugin>
<groupId>org.jacoco</groupId>
<artifactId>jacoco-maven-plugin</artifactId>
<version>0.8.2</version>
<executions>
<execution>
<id>jacoco-initialize</id>
<goals>
<goal>prepare-agent</goal>
</goals>
</execution>
<execution>
<id>jacoco-report</id>
<phase>test</phase>
<goals>
<goal>report</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<useSystemClassLoader>false</useSystemClassLoader>
<excludes>
<exclude>**/integration/**.java</exclude>
<exclude>**/integration/*/**.java</exclude>
</excludes>
<includes>
<include>DAOTestSuite.java</include>
</includes>
</configuration>
</plugin>
......@@ -458,161 +495,161 @@
<plugin>
<groupId>org.apache.tomcat.maven</groupId>
<artifactId>tomcat7-maven-plugin</artifactId>
<version>2.2</version>
<dependencies>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>${POSTGRESQL.VERSION}</version>
</dependency>
</dependencies>
<executions>
<execution>
<id>start-tomcat</id>
<goals>
<!-- <goal>run-war</goal> -->
<goal>run</goal>
</goals>
<phase>pre-integration-test</phase>
<configuration>
<port>${test.server.port}</port>
<fork>true</fork>
</configuration>
</execution>
<execution>
<id>stop-tomcat</id>
<goals>
<goal>shutdown</goal>
</goals>
<phase>post-integration-test</phase>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-failsafe-plugin</artifactId>
<version>2.18.1</version>
<configuration>
<argLine>-Xmx2048m -XX:MaxPermSize=1024m</argLine>
<environmentVariables>
<serviceurl>http://localhost:${test.server.port}/rest</serviceurl>
</environmentVariables>
<includes>
<include>**/integration/**.java</include>
<include>**/integration/*/**.java</include>
</includes>
<encoding>UTF-8</encoding>
</configuration>
<executions>
<execution>
<goals>
<goal>integration-test</goal>
<goal>verify</goal>
</goals>
</execution>
</executions>
</plugin>
<plugin>
<groupId>com.lazerycode.jmeter</groupId>
<artifactId>jmeter-maven-plugin</artifactId>
<version>2.0.3</version>
<configuration>
<propertiesJMeter>
<log_level.jmeter>DEBUG</log_level.jmeter>
</propertiesJMeter>
<testFilesIncluded>
<jMeterTestFile>${jmeter.test}.jmx</jMeterTestFile>
</testFilesIncluded>
<testResultsTimestamp>false</testResultsTimestamp>
<overrideRootLogLevel>DEBUG</overrideRootLogLevel>
<suppressJMeterOutput>${jmeter.suppressOutput}</suppressJMeterOutput>
<ignoreResultFailures>${jmeter.ignoreFailures}</ignoreResultFailures>
<skipTests>${jmeter.skip}</skipTests>
<propertiesUser>
<restApiHost>${jmeter.host}</restApiHost>
<restApiPort>${jmeter.port}</restApiPort>
<parallelRequests>${jmeter.parallelRequests}</parallelRequests>
<rampUpPeriod>${jmeter.rampUpPeriod}</rampUpPeriod>
<loopCount>${jmeter.loopCount}</loopCount>
</propertiesUser>
<!-- propertiesJMeter> <jmeter.save.saveservice.thread_counts>true</jmeter.save.saveservice.thread_counts>
</propertiesJMeter -->
<jmeterExtensions>
<artifact>kg.apc:jmeter-plugins-json:jar:2.3</artifact>
</jmeterExtensions>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-antrun-plugin</artifactId>
<executions>
<!-- use real PATH_ROOT in integration tests -->
<execution>
<id>set_actual.riharest.path_root</id>
<phase>pre-integration-test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<propertyfile
file="${project.build.testOutputDirectory}/riharest.project.properties">
<entry key="PATH_ROOT" value="${riharest.pathRoot}" />
</propertyfile>
</target>
</configuration>
</execution>
<!-- change PATH_ROOT back to test value after integration tests-->
<execution>
<id>set_test.riharest.path_root</id>
<phase>post-integration-test</phase>
<goals>
<goal>run</goal>
</goals>
<configuration>
<target>
<propertyfile
file="${project.build.testOutputDirectory}/riharest.project.properties">
<entry key="PATH_ROOT" value="${riharest.test.pathRoot}" />
</propertyfile>
</target>
</configuration>
</execution>
</executions>
</plugin>
<!--<plugin>-->
<!--<groupId>org.apache.tomcat.maven</groupId>-->
<!--<artifactId>tomcat7-maven-plugin</artifactId>-->
<!--<version>2.2</version>-->
<!--<dependencies>-->
<!--<dependency>-->
<!--<groupId>org.postgresql</groupId>-->
<!--<artifactId>postgresql</artifactId>-->
<!--<version>${POSTGRESQL.VERSION}</version>-->
<!--</dependency>-->
<!--</dependencies>-->
<!--<executions>-->
<!--<execution>-->
<!--<id>start-tomcat</id>-->
<!--<goals>-->
<!--&lt;!&ndash; <goal>run-war</goal> &ndash;&gt;-->
<!--<goal>run</goal>-->
<!--</goals>-->
<!--<phase>pre-integration-test</phase>-->
<!--<configuration>-->
<!--<port>${test.server.port}</port>-->
<!--<fork>true</fork>-->
<!--</configuration>-->
<!--</execution>-->
<!--<execution>-->
<!--<id>stop-tomcat</id>-->
<!--<goals>-->
<!--<goal>shutdown</goal>-->
<!--</goals>-->
<!--<phase>post-integration-test</phase>-->
<!--</execution>-->
<!--</executions>-->
<!--</plugin>-->
<!--<plugin>-->
<!--<groupId>org.apache.maven.plugins</groupId>-->
<!--<artifactId>maven-failsafe-plugin</artifactId>-->
<!--<version>2.18.1</version>-->
<!--<configuration>-->
<!--<argLine>-Xmx2048m -XX:MaxPermSize=1024m</argLine>-->
<!--<environmentVariables>-->
<!--<serviceurl>http://localhost:${test.server.port}/rest</serviceurl>-->
<!--</environmentVariables>-->
<!--<includes>-->
<!--<include>**/integration/**.java</include>-->
<!--<include>**/integration/*/**.java</include>-->
<!--</includes>-->
<!--<encoding>UTF-8</encoding>-->
<!--</configuration>-->
<!--<executions>-->
<!--<execution>-->
<!--<goals>-->
<!--<goal>integration-test</goal>-->
<!--<goal>verify</goal>-->
<!--</goals>-->
<!--</execution>-->
<!--</executions>-->
<!--</plugin>-->
<!--<plugin>-->
<!--<groupId>com.lazerycode.jmeter</groupId>-->
<!--<artifactId>jmeter-maven-plugin</artifactId>-->
<!--<version>2.0.3</version>-->
<!--<configuration>-->
<!--<propertiesJMeter>-->
<!--<log_level.jmeter>DEBUG</log_level.jmeter>-->
<!--</propertiesJMeter>-->
<!--<testFilesIncluded>-->
<!--<jMeterTestFile>${jmeter.test}.jmx</jMeterTestFile>-->
<!--</testFilesIncluded>-->
<!--<testResultsTimestamp>false</testResultsTimestamp>-->
<!--<overrideRootLogLevel>DEBUG</overrideRootLogLevel>-->
<!--<suppressJMeterOutput>${jmeter.suppressOutput}</suppressJMeterOutput>-->
<!--<ignoreResultFailures>${jmeter.ignoreFailures}</ignoreResultFailures>-->
<!--<skipTests>${jmeter.skip}</skipTests>-->
<!--<propertiesUser>-->
<!--<restApiHost>${jmeter.host}</restApiHost>-->
<!--<restApiPort>${jmeter.port}</restApiPort>-->
<!--<parallelRequests>${jmeter.parallelRequests}</parallelRequests>-->
<!--<rampUpPeriod>${jmeter.rampUpPeriod}</rampUpPeriod>-->
<!--<loopCount>${jmeter.loopCount}</loopCount>-->
<!--</propertiesUser>-->
<!--&lt;!&ndash; propertiesJMeter> <jmeter.save.saveservice.thread_counts>true</jmeter.save.saveservice.thread_counts> -->
<!--</propertiesJMeter &ndash;&gt;-->
<!--<jmeterExtensions>-->
<!--<artifact>kg.apc:jmeter-plugins-json:jar:2.3</artifact>-->
<!--</jmeterExtensions>-->
<!--</configuration>-->
<!--</plugin>-->
<!--<plugin>-->
<!--<groupId>org.apache.maven.plugins</groupId>-->
<!--<artifactId>maven-antrun-plugin</artifactId>-->
<!---->
<!--<executions>-->
<!--&lt;!&ndash; use real PATH_ROOT in integration tests &ndash;&gt;-->
<!--<execution>-->
<!---->
<!--<id>set_actual.riharest.path_root</id>-->
<!---->
<!--<phase>pre-integration-test</phase>-->
<!---->
<!--<goals>-->
<!--<goal>run</goal>-->
<!--</goals>-->
<!---->
<!--<configuration>-->
<!--<target>-->
<!--<propertyfile-->
<!--file="${project.build.testOutputDirectory}/riharest.project.properties">-->
<!--<entry key="PATH_ROOT" value="${riharest.pathRoot}" />-->
<!--</propertyfile>-->
<!--</target>-->
<!--</configuration>-->
<!--</execution>-->
<!---->
<!--&lt;!&ndash; change PATH_ROOT back to test value after integration tests&ndash;&gt;-->
<!--<execution>-->
<!---->
<!--<id>set_test.riharest.path_root</id>-->
<!---->
<!--<phase>post-integration-test</phase>-->
<!---->
<!--<goals>-->
<!--<goal>run</goal>-->
<!--</goals>-->
<!---->
<!--<configuration>-->
<!--<target>-->
<!--<propertyfile-->
<!--file="${project.build.testOutputDirectory}/riharest.project.properties">-->
<!--<entry key="PATH_ROOT" value="${riharest.test.pathRoot}" />-->
<!--</propertyfile>-->
<!--</target>-->
<!--</configuration>-->
<!--</execution>-->
<!--</executions>-->
<!--</plugin>-->
</plugins>
</build>
......
-- Migration: for legacy ("old RIHA") info systems, derive an ISKE 'security'
-- object from the three turvaklass codes (kaideldavus = availability,
-- terviklus = integrity, konfidentsiaalsus = confidentiality) and copy it
-- into the matching riha.main_resource row, unless one is already present.
WITH imported_info_system AS (
SELECT
-- one row per short name; ORDER BY below makes it the most recent one
DISTINCT ON (inf.lyhinimi)
inf.lyhinimi,
-- deterministic UUID derived from the short name; assumes the same
-- derivation was used when the info system itself was imported — the
-- join at the bottom depends on this. TODO confirm against importer.
uuid_in(md5(inf.lyhinimi) :: CSTRING) AS inf_uuid,
json_build_object(
-- 'class' concatenates the trailing token (text after the last '_')
-- of each of the three security-class codes
'class', substring(inf.tk_kaideldavus_kood, '[^_]*$')
|| substring(inf.tk_terviklus_kood, '[^_]*$')
|| substring(inf.tk_konfidentsiaalsus_kood, '[^_]*$'),
-- 'level': High if any code contains '3', else Medium if any
-- contains '2', else Low
'level', CASE
WHEN (inf.tk_kaideldavus_kood || inf.tk_terviklus_kood || inf.tk_konfidentsiaalsus_kood) ~ '3' THEN 'H'
WHEN (inf.tk_kaideldavus_kood || inf.tk_terviklus_kood || inf.tk_konfidentsiaalsus_kood) ~ '2' THEN 'M'
ELSE 'L'
END,
'standard', 'ISKE',
-- NOTE(review): these are the string 'null', not JSON null — verify
-- that downstream consumers expect string values here
'latest_audit_date', 'null',
'latest_audit_resolution', 'null') AS security
FROM infosysteem inf
-- only current rows (no end date), not cancelled/terminated systems,
-- not sub-systems, and only rows with all three security codes present
WHERE inf.kuupaev_kuni IS NULL
AND inf.staatus_kood IS DISTINCT FROM 'STAATUS_EI_ASUTATA'
AND inf.staatus_kood IS DISTINCT FROM 'INFOSYS_STAATUS_LOPETATUD'
AND kategooria IS DISTINCT FROM 'INFOSYSTEEM_KATEGOORIA_ALAMSYSTEEM'
AND inf.tk_kaideldavus_kood IS NOT NULL
AND inf.tk_konfidentsiaalsus_kood IS NOT NULL
AND inf.tk_terviklus_kood IS NOT NULL
-- combined with DISTINCT ON: keep the newest row per short name
ORDER BY inf.lyhinimi, inf.created DESC
)
-- Insert a copy of each matched main_resource row with the derived security
-- object merged into json_content under the 'security' key.
-- NOTE(review): this INSERTs a new row (fresh id from main_resource_seq)
-- rather than UPDATEing in place — presumably main_resource is versioned and
-- the newest row wins; verify against how main_resource_view is defined.
INSERT INTO riha.main_resource(main_resource_id, uri, name, owner, short_name, version, json_content, parent_uri, main_resource_parent_id,
kind, state, start_date, end_date, creator, modifier, creation_date, modified_date, old_id, field_name, kind_id, main_resource_template_id)
SELECT
nextval('riha.main_resource_seq'),
uri,
name,
owner,
short_name,
version,
jsonb_set(json_content, '{security}', imported_info_system.security :: jsonb),
parent_uri,
main_resource_parent_id,
kind,
state,
start_date,
end_date,
creator,
modifier,
creation_date,
modified_date,
old_id,
field_name,
kind_id,
main_resource_template_id
FROM imported_info_system INNER JOIN riha.main_resource_view mrv
-- match legacy systems to main_resource rows via the deterministic UUID
ON imported_info_system.inf_uuid = (mrv.json_content ->> 'uuid') :: UUID
-- skip rows that already carry a security object
WHERE NOT mrv.json_content ? 'security';
\ No newline at end of file
......@@ -45,7 +45,7 @@ public class RegisteredFileGrid extends AbstractQueryGrid {
Criterion dataCriterion = createFileDataSearchRestriction(dataFilterParameters);
if (dataCriterion != null) {
DetachedCriteria dataSubQueryCriteria = DetachedCriteria.forClass(
RegisteredFileView.LargeObjectCsvRecord.class, "csv")
RegisteredFileView.LargeObjectRecord.class, "csv")
.setProjection(Projections.id())
.add(dataCriterion);
......
......@@ -19,7 +19,7 @@ import org.springframework.util.StringUtils;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
......@@ -27,45 +27,29 @@ import java.util.Map;
* Converts CSV content from {@link LargeObject} of {@link FileResource} to {@link JsonNode}.
*/
@Component
public class CsvToGsonConverter {
public class CsvToGsonConverter implements ToGsonConverter {
public static final char DELIMITER = ';';
private static final Logger logger = LoggerFactory.getLogger(CsvToGsonConverter.class);
private static final Logger LOGGER = LoggerFactory.getLogger(CsvToGsonConverter.class);
private static final String CSV_FILE_SUFFIX = ".csv";
private static final CSVFormat DEFAULT_WITH_HEADERS = CSVFormat.DEFAULT
.withDelimiter(';')
.withDelimiter(DELIMITER)
.withFirstRecordAsHeader()
.withIgnoreEmptyLines()
.withIgnoreSurroundingSpaces();
private static List<MediaType> supportedMediaTypes = Arrays.asList(MediaType.valueOf("text/csv"));
private static final List<MediaType> SUPPORTED_MEDIA_TYPES = Collections.singletonList(MediaType.valueOf("text/csv"));
@Override
public boolean supports(FileResource fileResource) {
return supportedMediaTypes.contains(MediaType.valueOf(fileResource.getContentType()))
return SUPPORTED_MEDIA_TYPES.contains(MediaType.valueOf(fileResource.getContentType()))
|| StringUtils.endsWithIgnoreCase(fileResource.getName(), CSV_FILE_SUFFIX);
}
/**
* Converts {@link FileResource} input stream to {@link JsonNode} form
* <pre>
* {
* "meta": {&lt;used FileResource metadata&gt;}
* "headers": [&lt;CSV headers&gt;],
* "records": [
* {
* "header-name": "value",
* ...
* }
* ]
* }
* </pre>
*
* @param fileResource converted file resource
* @return created JsonNode
* @throws IOException in case of parsing errors
*/
@Override
public JsonObject convert(FileResource fileResource) throws IOException, SQLException {
logger.debug("Starting file resource '{}' CSV to JSON conversion", fileResource.getUuid());
LOGGER.debug("Starting file resource '{}' CSV to JSON conversion", fileResource.getUuid());
CSVParser parser = getFormat(fileResource)
.parse(new InputStreamReader(
......
package ee.eesti.riha.rest.dao.util;

import com.fasterxml.jackson.databind.JsonNode;
import com.google.gson.JsonObject;
import ee.eesti.riha.rest.model.FileResource;
import ee.eesti.riha.rest.model.LargeObject;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;

import javax.sql.rowset.serial.SerialBlob;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.sql.Blob;
import java.sql.SQLException;
import java.util.Collections;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Converts Excel content from {@link LargeObject} of {@link FileResource} to {@link JsonNode}.
 * <p>
 * The spreadsheet is first rewritten in place as semicolon-delimited CSV (one
 * line per row, all sheets concatenated, UTF-8 with BOM) and then delegated to
 * {@link CsvToGsonConverter} for the actual JSON conversion.
 */
@Component
public class ExcelToGsonConverter implements ToGsonConverter {

    private static final Pattern QUOTE_PATTERN = Pattern.compile("\"");
    private static final List<MediaType> SUPPORTED_MEDIA_TYPES = Collections.singletonList(
            MediaType.valueOf("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"));
    private static final String EXCEL_FILE_SUFFIX = ".xls";
    private static final String EXCELX_FILE_SUFFIX = ".xlsx";

    @Autowired
    private CsvToGsonConverter csvToGsonConverter;

    /**
     * Returns {@code true} when the file resource looks like an Excel file,
     * based either on its content type or its file name suffix.
     * <p>
     * NOTE(review): {@link #convert(FileResource)} parses with {@link XSSFWorkbook},
     * which handles only the XLSX format — a legacy binary {@code .xls} file will
     * match here but fail at parse time. Confirm whether .xls support is intended.
     */
    @Override
    public boolean supports(FileResource fileResource) {
        return SUPPORTED_MEDIA_TYPES.contains(MediaType.valueOf(fileResource.getContentType()))
                || StringUtils.endsWithIgnoreCase(fileResource.getName(), EXCEL_FILE_SUFFIX)
                || StringUtils.endsWithIgnoreCase(fileResource.getName(), EXCELX_FILE_SUFFIX);
    }

    /**
     * Converts the Excel payload of the given file resource to JSON.
     * <p>
     * Side effect: replaces the resource's {@link LargeObject} data with the
     * generated CSV before delegating to {@link CsvToGsonConverter}.
     *
     * @param fileResource resource whose large-object data holds an XLSX workbook
     * @return JSON representation produced by {@link CsvToGsonConverter}
     * @throws IOException  on workbook parsing or stream errors
     * @throws SQLException on blob access errors
     */
    @Override
    public JsonObject convert(FileResource fileResource) throws IOException, SQLException {
        Blob blob = fileResource.getLargeObject().getData();
        // try-with-resources replaces the original manual finally-block closes and
        // additionally closes the Workbook, which was previously leaked.
        try (ByteArrayInputStream inputStream = new ByteArrayInputStream(blob.getBytes(1, (int) blob.length()));
             Workbook workbook = new XSSFWorkbook(inputStream);
             ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
             PrintStream printStream = new PrintStream(byteArrayOutputStream, true, "UTF-8")) {

            FormulaEvaluator formulaEvaluator = workbook.getCreationHelper().createFormulaEvaluator();
            DataFormatter formatter = new DataFormatter();

            // UTF-8 BOM so downstream consumers can detect the encoding
            byte[] bom = {(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
            printStream.write(bom);

            for (int sheetNumber = 0; sheetNumber < workbook.getNumberOfSheets(); sheetNumber++) {
                writeSheetAsCsv(workbook.getSheetAt(sheetNumber), formulaEvaluator, formatter, printStream);
            }

            fileResource.getLargeObject().setData(new SerialBlob(byteArrayOutputStream.toByteArray()));
        }
        return csvToGsonConverter.convert(fileResource);
    }

    /**
     * Writes one sheet to {@code printStream} as semicolon-delimited CSV,
     * evaluating formulas and applying the cell's display formatting.
     */
    private static void writeSheetAsCsv(Sheet sheet, FormulaEvaluator formulaEvaluator,
                                        DataFormatter formatter, PrintStream printStream) {
        for (int rowNumber = 0; rowNumber <= sheet.getLastRowNum(); rowNumber++) {
            Row row = sheet.getRow(rowNumber);
            if (row == null) {
                // missing row: emit a line containing a lone delimiter (preserves
                // the original behavior so row numbering stays aligned)
                printStream.println(CsvToGsonConverter.DELIMITER);
                continue;
            }
            boolean firstCell = true;
            for (int cellNumber = 0; cellNumber < row.getLastCellNum(); cellNumber++) {
                Cell cell = row.getCell(cellNumber, Row.MissingCellPolicy.RETURN_BLANK_AS_NULL);
                if (!firstCell) {
                    printStream.print(CsvToGsonConverter.DELIMITER);
                }
                if (cell != null) {
                    cell = formulaEvaluator.evaluateInCell(cell);
                    String value = formatter.formatCellValue(cell);
                    if (cell.getCellType() == CellType.FORMULA) {
                        // keep a visible marker that the value came from a formula
                        value = "=" + value;
                    }
                    printStream.print(encodeValue(value));
                }
                firstCell = false;
            }
            printStream.println();
        }
    }

    /**
     * CSV-encodes a single cell value: doubles embedded quotes and wraps the
     * value in quotes when it contains the delimiter, a quote, or a line break.
     */
    private static String encodeValue(String value) {
        boolean needsQuotes = value.indexOf(CsvToGsonConverter.DELIMITER) != -1
                || value.indexOf('"') != -1
                || value.indexOf('\n') != -1
                || value.indexOf('\r') != -1;
        Matcher matcher = QUOTE_PATTERN.matcher(value);
        String escaped = matcher.replaceAll("\"\"");
        return needsQuotes ? "\"" + escaped + "\"" : escaped;
    }
}
\ No newline at end of file
package ee.eesti.riha.rest.dao.util;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.gson.JsonObject;
import ee.eesti.riha.rest.model.FileResource;
import ee.eesti.riha.rest.model.LargeObject;