Commit df5530c8 authored by Valentin Suhnjov

Merge branch 'releases/release-0.7.0' into master

parents 30577587 6635a505
@@ -5,7 +5,7 @@
<groupId>ee.eesti.riha</groupId>
<artifactId>rest</artifactId>
<version>0.6.0</version>
<version>0.7.0</version>
<packaging>war</packaging>
......
@@ -26,4 +26,8 @@ alter table riha.comment add CONSTRAINT fk_comment_comment FOREIGN KEY (comment_
REFERENCES riha.comment (comment_id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE NO ACTION;
ALTER TABLE riha.file_resource ADD CONSTRAINT fk_file_resource_large_object FOREIGN KEY (large_object_id)
REFERENCES riha.large_object (id) MATCH SIMPLE
ON UPDATE NO ACTION ON DELETE NO ACTION;
CREATE TRIGGER tr_infosystem_update AFTER UPDATE ON main_resource FOR EACH ROW EXECUTE PROCEDURE infosystem_trg();
@@ -58,6 +58,14 @@ START 436069
CACHE 1;
GRANT SELECT, USAGE ON SEQUENCE riha.main_resource_seq TO riha;
CREATE SEQUENCE riha.large_object_seq
INCREMENT 1
START 1
MINVALUE 1
MAXVALUE 9223372036854775807
CACHE 1;
GRANT SELECT, USAGE ON SEQUENCE riha.large_object_seq TO riha;
-- Table: riha.kind
-- DROP TABLE riha.kind;
@@ -356,9 +364,50 @@ COMMENT ON COLUMN riha.comment.type IS 'Hinnangu tüüp';
COMMENT ON COLUMN riha.comment.title IS 'Hinnangu pealkiri';
COMMENT ON COLUMN riha.comment.sub_type IS 'Hinnangu alamtüüp';
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE riha.comment TO riha;
-- Table: riha.large_object
-- DROP TABLE riha.large_object;
CREATE TABLE riha.large_object
(
id integer NOT NULL,
creation_date timestamp without time zone,
data oid,
hash character varying(255),
length bigint,
CONSTRAINT large_object_pkey PRIMARY KEY (id)
)
WITH (
OIDS = FALSE
);
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE riha.large_object TO riha;
-- Table: riha.file_resource
-- DROP TABLE riha.file_resource;
CREATE TABLE riha.file_resource
(
uuid uuid NOT NULL,
content_type character varying(255),
creation_date timestamp without time zone,
name character varying(255),
large_object_id integer NOT NULL,
CONSTRAINT file_resource_pkey PRIMARY KEY (uuid),
CONSTRAINT fk_file_resource_large_object FOREIGN KEY (large_object_id)
REFERENCES riha.large_object (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
WITH (
OIDS = FALSE
);
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE riha.file_resource TO riha;
-- Index: riha.ixfk_comment_comment
-- DROP INDEX riha.ixfk_comment_comment;
......
CREATE VIEW riha.main_resource_view AS
SELECT DISTINCT ON (json_content ->> 'uuid') *
FROM riha.main_resource
CREATE OR REPLACE VIEW riha.main_resource_view AS
SELECT DISTINCT ON (json_content ->> 'uuid') *,
((main_resource.json_content #>> '{meta,creation_timestamp}'::text[]))::timestamp AS j_creation_timestamp
FROM riha.main_resource as main_resource
ORDER BY json_content ->> 'uuid',
creation_date DESC,
main_resource_id DESC;
\ No newline at end of file
@@ -5,4 +5,5 @@ alter table riha.data_object drop constraint fk_data_object_main_resource;
alter table riha.document drop CONSTRAINT fk_data_object;
alter TABLE riha.document DROP CONSTRAINT fk_main_resource;
alter table riha.comment drop CONSTRAINT fk_comment_comment;
alter table riha.file_resource drop CONSTRAINT fk_file_resource_large_object;
DROP TRIGGER IF EXISTS tr_infosystem_update ON main_resource;
@@ -42,6 +42,14 @@ DROP TABLE riha.main_resource;
DROP TABLE riha.kind;
-- Table: riha.file_resource
DROP TABLE riha.file_resource;
-- Table: riha.large_object
DROP TABLE riha.large_object;
-- Sequence: riha.comment_seq
DROP SEQUENCE riha.comment_seq;
......
CREATE SEQUENCE riha.large_object_seq
INCREMENT 1
START 1
MINVALUE 1
MAXVALUE 9223372036854775807
CACHE 1;
GRANT SELECT, USAGE ON SEQUENCE riha.large_object_seq TO riha;
-- Table: riha.large_object
-- DROP TABLE riha.large_object;
CREATE TABLE riha.large_object
(
id integer NOT NULL,
creation_date timestamp without time zone,
data oid,
hash character varying(255),
CONSTRAINT large_object_pkey PRIMARY KEY (id)
)
WITH (
OIDS = FALSE
);
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE riha.large_object TO riha;
-- Table: riha.file_resource
-- DROP TABLE riha.file_resource;
CREATE TABLE riha.file_resource
(
uuid uuid NOT NULL,
content_type character varying(255),
creation_date timestamp without time zone,
name character varying(255),
large_object_id integer NOT NULL,
CONSTRAINT file_resource_pkey PRIMARY KEY (uuid),
CONSTRAINT fk_file_resource_large_object FOREIGN KEY (large_object_id)
REFERENCES riha.large_object (id) MATCH SIMPLE
ON UPDATE NO ACTION
ON DELETE NO ACTION
)
WITH (
OIDS = FALSE
);
GRANT SELECT, INSERT, UPDATE, DELETE ON TABLE riha.file_resource TO riha;
ALTER TABLE riha.large_object
ADD COLUMN length BIGINT;
\ No newline at end of file
CREATE OR REPLACE VIEW riha.main_resource_view AS
SELECT DISTINCT ON (json_content ->> 'uuid') *,
((main_resource.json_content #>> '{meta,creation_timestamp}'::text[]))::timestamp AS j_creation_timestamp
FROM riha.main_resource as main_resource
ORDER BY json_content ->> 'uuid',
creation_date DESC,
main_resource_id DESC;
\ No newline at end of file
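The view above deduplicates main_resource rows by their JSON uuid, keeping the newest version of each, and now exposes the JSON meta creation timestamp as the typed column j_creation_timestamp. A minimal JDBC sketch of reading that column follows; the example class, connection URL, and credentials are placeholders, not values from this commit.

package ee.eesti.riha.rest.dao;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;

// Hypothetical illustration, not part of this commit.
public class MainResourceViewQueryExample {

    public static void main(String[] args) throws SQLException {
        String url = "jdbc:postgresql://localhost:5432/riha"; // placeholder
        try (Connection conn = DriverManager.getConnection(url, "riha", "secret"); // placeholder credentials
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery(
                     "SELECT main_resource_id, j_creation_timestamp FROM riha.main_resource_view")) {
            while (rs.next()) {
                Timestamp created = rs.getTimestamp("j_creation_timestamp");
                System.out.println(rs.getInt("main_resource_id") + " created at " + created);
            }
        }
    }
}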
package ee.eesti.riha.rest.dao;
import ee.eesti.riha.rest.model.FileResource;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.transaction.Transactional;
import java.util.Date;
import java.util.UUID;
@Component
@Transactional
public class FileResourceDAO {
@Autowired
private SessionFactory sessionFactory;
/**
* Creates a single {@link FileResource} entity, stamping its creation date.
*
* @param entity entity to persist
* @return UUID of the persisted {@link FileResource}
*/
public UUID create(FileResource entity) {
entity.setCreationDate(new Date());
return (UUID) sessionFactory.getCurrentSession().save(entity);
}
/**
* Retrieves a single {@link FileResource} entity by its UUID.
*
* @param uuid entity UUID
* @return loaded entity or null if not found
*/
public FileResource get(UUID uuid) {
return (FileResource) sessionFactory.getCurrentSession().get(FileResource.class, uuid);
}
}
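A minimal usage sketch of FileResourceDAO. The wrapper class, file name, and content type are illustrative only; in real code the DAO is injected by Spring and invoked inside an open transaction.

package ee.eesti.riha.rest.dao;

import ee.eesti.riha.rest.model.FileResource;
import ee.eesti.riha.rest.model.LargeObject;

import java.util.UUID;

// Hypothetical illustration, not part of this commit.
public class FileResourceDAOUsageExample {

    private FileResourceDAO fileResourceDAO; // injected in real code

    public UUID storeMetadata(LargeObject largeObject) {
        FileResource resource = new FileResource();
        resource.setName("report.pdf");             // example file name
        resource.setContentType("application/pdf"); // example content type
        resource.setLargeObject(largeObject);       // FK to the binary payload
        // create() stamps creationDate and returns the generated UUID
        return fileResourceDAO.create(resource);
    }
}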
package ee.eesti.riha.rest.dao;
import ee.eesti.riha.rest.logic.util.LengthCalculatingInputStream;
import ee.eesti.riha.rest.model.LargeObject;
import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.criterion.Order;
import org.hibernate.criterion.Projections;
import org.hibernate.criterion.Restrictions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.transaction.Transactional;
import javax.xml.bind.DatatypeConverter;
import java.io.InputStream;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Date;
import java.util.List;
@Component
@Transactional
public class LargeObjectDAO {
private static final Logger LOG = LoggerFactory.getLogger(LargeObjectDAO.class);
private static final String HASH_ALGORITHM = "SHA-256";
private boolean deleteWhenReuseFound = true;
@Autowired
private SessionFactory sessionFactory;
/**
* <p>Creates a {@link LargeObject} entity from the provided {@link InputStream}, calculating the SHA-256 hash of
* the data in the process.</p>
*
* <p>In most cases the created entity contains new data that was not previously persisted. Under that assumption,
* this method first persists the entity (hashing its data along the way) and then looks for existing entities with
* the same hash. The id of either the newly created entity or the oldest existing one is returned.</p>
*
* @param inputStream object input stream
* @return id of the created entity, or id of an existing entity with the same hash
*/
public int create(InputStream inputStream) {
LengthCalculatingInputStream lengthCalculatingInputStream = new LengthCalculatingInputStream(inputStream);
DigestInputStream digestInputStream;
try {
digestInputStream = new DigestInputStream(lengthCalculatingInputStream, MessageDigest.getInstance(HASH_ALGORITHM));
} catch (NoSuchAlgorithmException e) {
throw new IllegalStateException("Could not create DigestInputStream with algorithm " + HASH_ALGORITHM, e);
}
LargeObject entity = createEntityFromInputStream(digestInputStream);
setHash(entity, digestInputStream.getMessageDigest());
setLength(entity, lengthCalculatingInputStream.getLength());
if (deleteWhenReuseFound) {
Integer reusableEntityId = getFirstReusableEntityId(entity);
if (reusableEntityId != null) {
if (LOG.isInfoEnabled()) {
LOG.info("Deleting persisted LargeObject with id {} since reusable LargeObject with id {} is found for hash {}",
entity.getId(), reusableEntityId, entity.getHash());
}
delete(entity);
return reusableEntityId;
}
}
return entity.getId();
}
private Integer getFirstReusableEntityId(LargeObject entity) {
List<Integer> existingObjects = findSameHashIds(entity);
if (!existingObjects.isEmpty()) {
int firstId = existingObjects.get(0);
if (LOG.isInfoEnabled()) {
LOG.info("There is {} other LargeObject entities with the same hash {}, using oldest one with id {}",
existingObjects.size(), entity.getHash(), firstId);
}
return firstId;
}
return null;
}
private List<Integer> findSameHashIds(LargeObject entity) {
Criteria criteria = sessionFactory.getCurrentSession().createCriteria(LargeObject.class, "lo")
.setProjection(Projections.id())
.add(Restrictions.ne("lo.id", entity.getId()))
.add(Restrictions.eq("lo.hash", entity.getHash()))
.addOrder(Order.asc("lo.creationDate"));
return (List<Integer>) criteria.list();
}
private LargeObject createEntityFromInputStream(InputStream inputStream) {
if (LOG.isDebugEnabled()) {
LOG.debug("Creating LargeObject entity");
}
Session session = sessionFactory.getCurrentSession();
LargeObject entity = new LargeObject();
entity.setCreationDate(new Date());
entity.setData(session.getLobHelper().createBlob(inputStream, -1));
// Save and flush in order to persist blob and calculate hash
session.save(entity);
session.flush();
if (LOG.isDebugEnabled()) {
LOG.debug("LargeObject with id {} is created", entity.getId());
}
return entity;
}
private void setHash(LargeObject entity, MessageDigest digest) {
String hash = DatatypeConverter.printHexBinary(digest.digest());
String algorithm = digest.getAlgorithm();
if (LOG.isDebugEnabled()) {
LOG.debug("LargeObject entity calculated {} hash is {}", algorithm, hash);
}
entity.setHash(hash);
sessionFactory.getCurrentSession().saveOrUpdate(entity);
}
private void setLength(LargeObject entity, long length) {
entity.setLength(length);
sessionFactory.getCurrentSession().saveOrUpdate(entity);
}
/**
* Gets a {@link LargeObject} entity by id.
*
* @param id entity id for loading
* @return loaded entity or null if not found
*/
public LargeObject get(int id) {
return (LargeObject) sessionFactory.getCurrentSession().get(LargeObject.class, id);
}
/**
* Deletes {@link LargeObject} entity.
*
* @param entity entity for deletion
*/
public void delete(LargeObject entity) {
sessionFactory.getCurrentSession().delete(entity);
}
/**
* Indicates whether a just-created entity should be deleted when another one with the same hash already exists.
*
* @return true if a newly created entity is dropped in favour of an existing one, false otherwise
*/
public boolean isDeleteWhenReuseFound() {
return deleteWhenReuseFound;
}
/**
* Set to true (the default) in order to delete a just-persisted entity when another one with the same hash
* already exists.
*
* @param deleteWhenReuseFound whether an existing entity should be reused and the just-created one dropped
*/
public void setDeleteWhenReuseFound(boolean deleteWhenReuseFound) {
this.deleteWhenReuseFound = deleteWhenReuseFound;
}
}
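A sketch of the reuse behaviour documented on create(): feeding identical bytes twice should yield the same id while deleteWhenReuseFound keeps its default of true. The wrapper class and payload are hypothetical.

package ee.eesti.riha.rest.dao;

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

// Hypothetical illustration, not part of this commit: identical content hashes
// to the same SHA-256 value, so the second create() deletes its freshly
// persisted row and returns the id of the oldest entity with that hash.
public class LargeObjectDAOUsageExample {

    private LargeObjectDAO largeObjectDAO; // injected in real code

    public void demonstrateReuse() {
        byte[] content = "same payload".getBytes(StandardCharsets.UTF_8);
        int firstId = largeObjectDAO.create(new ByteArrayInputStream(content));
        int secondId = largeObjectDAO.create(new ByteArrayInputStream(content));
        assert firstId == secondId; // both calls resolve to one stored object
    }
}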
package ee.eesti.riha.rest.logic;
import ee.eesti.riha.rest.dao.FileResourceDAO;
import ee.eesti.riha.rest.dao.LargeObjectDAO;
import ee.eesti.riha.rest.model.FileResource;
import ee.eesti.riha.rest.model.LargeObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StreamUtils;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.sql.SQLException;
import java.util.UUID;
@Component
public class FileResourceLogic {
@Autowired
private LargeObjectDAO largeObjectDAO;
@Autowired
private FileResourceDAO fileResourceDAO;
/**
* Creates a {@link FileResource} with the given name and content type from the provided {@link InputStream}.
*
* @param inputStream data input stream
* @param name resource name
* @param contentType resource content type
* @return UUID of created file resource
*/
@Transactional
public UUID create(InputStream inputStream, String name, String contentType) {
int largeObjectId = largeObjectDAO.create(inputStream);
LargeObject largeObject = largeObjectDAO.get(largeObjectId);
if (largeObject == null) {
throw new IllegalStateException("LargeObject with id " + largeObjectId + " is not found");
}
FileResource entity = new FileResource();
entity.setName(name);
entity.setContentType(contentType);
entity.setLargeObject(largeObject);
return fileResourceDAO.create(entity);
}
@Transactional
public FileResource get(UUID uuid) {
return fileResourceDAO.get(uuid);
}
@Transactional
public void copyLargeObjectData(UUID uuid, OutputStream output) throws SQLException, IOException {
FileResource fileResource = get(uuid);
if (fileResource == null) {
throw new IllegalStateException("FileResource with id " + uuid.toString() + " is not found");
}
StreamUtils.copy(fileResource.getLargeObject().getData().getBinaryStream(), output);
}
}
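A round-trip sketch for FileResourceLogic: upload a small payload, then stream it back through copyLargeObjectData. The example class and payload are illustrative only.

package ee.eesti.riha.rest.logic;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.UUID;

// Hypothetical illustration, not part of this commit.
public class FileResourceLogicUsageExample {

    private FileResourceLogic fileResourceLogic; // injected in real code

    public byte[] roundTrip() throws SQLException, IOException {
        byte[] payload = "hello".getBytes(StandardCharsets.UTF_8);
        UUID uuid = fileResourceLogic.create(
                new ByteArrayInputStream(payload), "hello.txt", "text/plain");
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        fileResourceLogic.copyLargeObjectData(uuid, out);
        return out.toByteArray(); // equal to payload
    }
}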
@@ -4,6 +4,7 @@ import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import ee.eesti.riha.rest.model.Main_resource;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -187,7 +188,10 @@ public class TableEntryCreateLogic<T extends BaseModel> {
Date dt = new Date();
String dtJsonFormat = DateHelper.FORMATTER.format(dt);
LOG.info(dtJsonFormat);
jsonContent.addProperty("creation_date", dtJsonFormat);
if (classRepresentingTable != Main_resource.class) {
jsonContent.addProperty("creation_date", dtJsonFormat);
}
// save primary key to json_content as well
String pkFieldName = createPKFieldName(classRepresentingTable);
@@ -200,6 +204,7 @@ public class TableEntryCreateLogic<T extends BaseModel> {
// Set required fields
entity.callSetId(pkId);
entity.setCreation_date(dt);
// When dealing with json content table, initialize json_content field with source json and check if it was set correctly
if (JsonContentBasedTable.isJsonContentBasedTable(classRepresentingTable)) {
......
package ee.eesti.riha.rest.logic.util;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
* Calculates the size of an {@link InputStream} while it is being read.
*/
public class LengthCalculatingInputStream extends FilterInputStream {
private long length = 0;
public LengthCalculatingInputStream(InputStream in) {
super(in);
}
public long getLength() {
return length;
}
public void setLength(long length) {
this.length = length;
}
@Override
public int read() throws IOException {
int result = super.read();
if (result != -1) {
this.length += 1;
}
return result;
}
@Override
public int read(byte[] b, int off, int len) throws IOException {
int result = super.read(b, off, len);
if (result != -1) {
this.length += result;
}
return result;
}
}
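A self-contained sketch of LengthCalculatingInputStream in use: the counter only covers bytes actually read, so the stream is drained before getLength() is consulted. The example class is hypothetical.

package ee.eesti.riha.rest.logic.util;

import java.io.ByteArrayInputStream;
import java.io.IOException;

// Hypothetical illustration, not part of this commit.
public class LengthCalculatingInputStreamExample {

    public static void main(String[] args) throws IOException {
        byte[] data = new byte[1234];
        try (LengthCalculatingInputStream in =
                new LengthCalculatingInputStream(new ByteArrayInputStream(data))) {
            byte[] buffer = new byte[256];
            while (in.read(buffer) != -1) {
                // drain the stream; each read adds to the counter
            }
            System.out.println(in.getLength()); // prints 1234
        }
    }
}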
package ee.eesti.riha.rest.model;
import org.hibernate.annotations.DynamicUpdate;
import org.hibernate.annotations.GenericGenerator;
import org.hibernate.annotations.Type;
import javax.persistence.*;
import java.util.Date;
import java.util.UUID;
@Entity
@Table(name = "file_resource")
@DynamicUpdate
public class FileResource {
@Id
@GeneratedValue(generator = "uuid")
@GenericGenerator(name = "uuid", strategy = "uuid2")
@Column(name = "uuid", unique = true)
@Type(type = "pg-uuid")
private UUID uuid;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "creation_date")
private Date creationDate;
@Column(name = "name")
private String name;
@Column(name = "content_type")
private String contentType;
@ManyToOne(targetEntity = LargeObject.class)
@JoinColumn(name = "large_object_id", nullable = false)
private LargeObject largeObject;
public UUID getUuid() {
return uuid;
}
public void setUuid(UUID uuid) {
this.uuid = uuid;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getContentType() {
return contentType;
}
public void setContentType(String contentType) {
this.contentType = contentType;
}
public LargeObject getLargeObject() {
return largeObject;
}
public void setLargeObject(LargeObject largeObject) {
this.largeObject = largeObject;
}
}
\ No newline at end of file
package ee.eesti.riha.rest.model;
import org.hibernate.annotations.DynamicUpdate;
import javax.persistence.*;
import java.sql.Blob;
import java.util.Date;
import static javax.persistence.GenerationType.AUTO;
/**
* Entity for holding large binary objects such as document attachments or arbitrary files.
*/
@Entity
@Table(name = "large_object")
@DynamicUpdate
public class LargeObject {
@Id
@GeneratedValue(strategy = AUTO, generator = "large_object_seq")
@SequenceGenerator(name = "large_object_seq", sequenceName = "large_object_seq")
@Column(name = "id", updatable = false)
private int id;
@Temporal(TemporalType.TIMESTAMP)
@Column(name = "creation_date")
private Date creationDate;
@Column(name = "hash")
private String hash;
@Column(name = "length")
private Long length;
@Lob
@Column(name = "data")
private Blob data;
public int getId() {
return id;
}
public void setId(int id) {
this.id = id;
}
public Date getCreationDate() {
return creationDate;
}
public void setCreationDate(Date creationDate) {
this.creationDate = creationDate;
}
public String getHash() {
return hash;
}
public void setHash(String hash) {
this.hash = hash;
}
public Long getLength() {
return length;
}
public void setLength(Long length) {
this.length = length;
}
public Blob getData() {
return data;
}
public void setData(Blob data) {
this.data = data;
}
}
@@ -48,6 +48,9 @@ public class Main_resource_view implements BaseModel {
private String kind;
@Column(name = "j_creation_timestamp")
private Date j_creation_timestamp;
public Integer getMain_resource_id() {
return main_resource_id;