diff --git a/.gitignore b/.gitignore
index dbdc5f259542a58c7ff393bc82c249f21fb43b7d..d49fa3b2094287633828ef1433775c4326f8d725 100644
--- a/.gitignore
+++ b/.gitignore
@@ -41,3 +41,4 @@ yarn-debug.log*
/.idea/*
/config/auths.yml
+db_env_var.txt
diff --git a/Gemfile b/Gemfile
index 25aefc1f4fb6be54124fdd219e81a96bb6304d0c..bdea39956f1f381971998bd0c3e10535f1c136a5 100644
--- a/Gemfile
+++ b/Gemfile
@@ -1,64 +1,69 @@
-source 'https://rubygems.org'
+source "https://rubygems.org"
git_source(:github) { |repo| "https://github.com/#{repo}.git" }
-ruby '3.0.1'
+ruby "3.0.1"
# Bundle edge Rails instead: gem 'rails', github: 'rails/rails', branch: 'main'
-gem 'rails', '~> 6.1.3', '>= 6.1.3.2'
+gem "rails", "~> 6.1.3", ">= 6.1.3.2"
# Use sqlite3 as the database for Active Record
-# gem 'sqlite3', '~> 1.4'
-gem 'pg'
-gem 'devise'
+# gem 'sqlite3'
+gem "pg"
+# gem 'devise'
# Use Puma as the app server
-gem 'puma', '~> 5.0'
+gem "puma", "~> 5.0"
# Use SCSS for stylesheets
-gem 'sass-rails', '>= 6'
+gem "sass-rails", ">= 6"
# Transpile app-like JavaScript. Read more: https://github.com/rails/webpacker
-gem 'webpacker', '~> 5.0'
+gem "webpacker"
# Turbolinks makes navigating your web application faster. Read more: https://github.com/turbolinks/turbolinks
-gem 'turbolinks', '~> 5'
+gem "turbolinks", "~> 5"
# Build JSON APIs with ease. Read more: https://github.com/rails/jbuilder
-gem 'jbuilder', '~> 2.7'
+gem "jbuilder", "~> 2.7"
# Use Redis adapter to run Action Cable in production
# gem 'redis', '~> 4.0'
# Use Active Model has_secure_password
# gem 'bcrypt', '~> 3.1.7'
-gem 'rsolr'
-gem 'sidekiq', '~> 6.0'
-gem 'pragmatic_tokenizer'
-gem 'activerecord-session_store'
+# Use OpenSSL for encryption of Auth Token
+gem "openssl"
+# Use rSolr for search queries
+gem "rsolr"
+# Use rack-cors for cross-origin requests
+gem "rack-cors"
+gem "sidekiq", "~> 6.0"
+gem "pragmatic_tokenizer"
+gem "activerecord-session_store"
# Use Active Storage variant
# gem 'image_processing', '~> 1.2'
# Reduces boot times through caching; required in config/boot.rb
-gem 'bootsnap', '>= 1.4.4', require: false
+gem "bootsnap", ">= 1.4.4", require: false
group :development, :test do
# Call 'byebug' anywhere in the code to stop execution and get a debugger console
- gem 'byebug', platforms: [:mri, :mingw, :x64_mingw]
+ gem "byebug", platforms: [:mri, :mingw, :x64_mingw]
end
group :development do
# Access an interactive console on exception pages or by calling 'console' anywhere in the code.
- gem 'web-console', '>= 4.1.0'
+ gem "web-console", ">= 4.1.0"
# Display performance information such as SQL time and flame graphs for each request in your browser.
# Can be configured to work on production as well see: https://github.com/MiniProfiler/rack-mini-profiler/blob/master/README.md
- gem 'rack-mini-profiler', '~> 2.0'
- gem 'listen', '~> 3.3'
+ gem "rack-mini-profiler", "~> 2.0"
+ gem "listen", "~> 3.3"
# Spring speeds up development by keeping your application running in the background. Read more: https://github.com/rails/spring
- gem 'spring'
- gem 'brakeman'
+ gem "spring"
+ gem "brakeman"
end
group :test do
# Adds support for Capybara system testing and selenium driver
- gem 'capybara', '>= 3.26'
- gem 'selenium-webdriver'
+ gem "capybara", ">= 3.26"
+ gem "selenium-webdriver"
# Easy installation and use of web drivers to run system tests with browsers
- gem 'webdrivers'
+ gem "webdrivers"
end
# Windows does not include zoneinfo files, so bundle the tzinfo-data gem
-gem 'tzinfo-data', platforms: [:mingw, :mswin, :x64_mingw, :jruby]
+gem "tzinfo-data", platforms: [:mingw, :mswin, :x64_mingw, :jruby]
diff --git a/Gemfile.lock b/Gemfile.lock
index 702ebd8999931e37b38b5fbb85e7ed80d1ddc6e9..4fd921cdfc2edb5c082bd52e972a3832387799ea 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -1,66 +1,66 @@
GEM
remote: https://rubygems.org/
specs:
- actioncable (6.1.4)
- actionpack (= 6.1.4)
- activesupport (= 6.1.4)
+ actioncable (6.1.6.1)
+ actionpack (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
nio4r (~> 2.0)
websocket-driver (>= 0.6.1)
- actionmailbox (6.1.4)
- actionpack (= 6.1.4)
- activejob (= 6.1.4)
- activerecord (= 6.1.4)
- activestorage (= 6.1.4)
- activesupport (= 6.1.4)
+ actionmailbox (6.1.6.1)
+ actionpack (= 6.1.6.1)
+ activejob (= 6.1.6.1)
+ activerecord (= 6.1.6.1)
+ activestorage (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
mail (>= 2.7.1)
- actionmailer (6.1.4)
- actionpack (= 6.1.4)
- actionview (= 6.1.4)
- activejob (= 6.1.4)
- activesupport (= 6.1.4)
+ actionmailer (6.1.6.1)
+ actionpack (= 6.1.6.1)
+ actionview (= 6.1.6.1)
+ activejob (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
mail (~> 2.5, >= 2.5.4)
rails-dom-testing (~> 2.0)
- actionpack (6.1.4)
- actionview (= 6.1.4)
- activesupport (= 6.1.4)
+ actionpack (6.1.6.1)
+ actionview (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
rack (~> 2.0, >= 2.0.9)
rack-test (>= 0.6.3)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.0, >= 1.2.0)
- actiontext (6.1.4)
- actionpack (= 6.1.4)
- activerecord (= 6.1.4)
- activestorage (= 6.1.4)
- activesupport (= 6.1.4)
+ actiontext (6.1.6.1)
+ actionpack (= 6.1.6.1)
+ activerecord (= 6.1.6.1)
+ activestorage (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
nokogiri (>= 1.8.5)
- actionview (6.1.4)
- activesupport (= 6.1.4)
+ actionview (6.1.6.1)
+ activesupport (= 6.1.6.1)
builder (~> 3.1)
erubi (~> 1.4)
rails-dom-testing (~> 2.0)
rails-html-sanitizer (~> 1.1, >= 1.2.0)
- activejob (6.1.4)
- activesupport (= 6.1.4)
+ activejob (6.1.6.1)
+ activesupport (= 6.1.6.1)
globalid (>= 0.3.6)
- activemodel (6.1.4)
- activesupport (= 6.1.4)
- activerecord (6.1.4)
- activemodel (= 6.1.4)
- activesupport (= 6.1.4)
+ activemodel (6.1.6.1)
+ activesupport (= 6.1.6.1)
+ activerecord (6.1.6.1)
+ activemodel (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
activerecord-session_store (2.0.0)
actionpack (>= 5.2.4.1)
activerecord (>= 5.2.4.1)
multi_json (~> 1.11, >= 1.11.2)
rack (>= 2.0.8, < 3)
railties (>= 5.2.4.1)
- activestorage (6.1.4)
- actionpack (= 6.1.4)
- activejob (= 6.1.4)
- activerecord (= 6.1.4)
- activesupport (= 6.1.4)
- marcel (~> 1.0.0)
+ activestorage (6.1.6.1)
+ actionpack (= 6.1.6.1)
+ activejob (= 6.1.6.1)
+ activerecord (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
+ marcel (~> 1.0)
mini_mime (>= 1.1.0)
- activesupport (6.1.4)
+ activesupport (6.1.6.1)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
@@ -68,124 +68,110 @@ GEM
zeitwerk (~> 2.3)
addressable (2.8.0)
public_suffix (>= 2.0.2, < 5.0)
- bcrypt (3.1.16)
bindex (0.8.1)
- bootsnap (1.7.5)
- msgpack (~> 1.0)
- brakeman (5.1.1)
+ bootsnap (1.13.0)
+ msgpack (~> 1.2)
+ brakeman (5.2.3)
builder (3.2.4)
byebug (11.1.3)
- capybara (3.35.3)
+ capybara (3.37.1)
addressable
+ matrix
mini_mime (>= 0.1.3)
nokogiri (~> 1.8)
rack (>= 1.6.0)
rack-test (>= 0.6.3)
regexp_parser (>= 1.5, < 3.0)
xpath (~> 3.2)
- childprocess (3.0.0)
- concurrent-ruby (1.1.9)
+ childprocess (4.1.0)
+ concurrent-ruby (1.1.10)
connection_pool (2.2.5)
crass (1.0.6)
- devise (4.8.0)
- bcrypt (~> 3.0)
- orm_adapter (~> 0.1)
- railties (>= 4.1.0)
- responders
- warden (~> 1.2.3)
erubi (1.10.0)
- faraday (1.4.2)
- faraday-em_http (~> 1.0)
- faraday-em_synchrony (~> 1.0)
- faraday-excon (~> 1.1)
- faraday-net_http (~> 1.0)
- faraday-net_http_persistent (~> 1.1)
- multipart-post (>= 1.2, < 3)
+ faraday (2.4.0)
+ faraday-net_http (~> 2.0)
ruby2_keywords (>= 0.0.4)
- faraday-em_http (1.0.0)
- faraday-em_synchrony (1.0.0)
- faraday-excon (1.1.0)
- faraday-net_http (1.0.1)
- faraday-net_http_persistent (1.1.0)
- ffi (1.15.3)
- globalid (0.4.2)
- activesupport (>= 4.2.0)
- i18n (1.8.10)
+ faraday-net_http (2.1.0)
+ ffi (1.15.5)
+ globalid (1.0.0)
+ activesupport (>= 5.0)
+ i18n (1.12.0)
concurrent-ruby (~> 1.0)
- jbuilder (2.11.2)
+ jbuilder (2.11.5)
+ actionview (>= 5.0.0)
activesupport (>= 5.0.0)
- listen (3.6.0)
+ listen (3.7.1)
rb-fsevent (~> 0.10, >= 0.10.3)
rb-inotify (~> 0.9, >= 0.9.10)
- loofah (2.10.0)
+ loofah (2.18.0)
crass (~> 1.0.2)
nokogiri (>= 1.5.9)
mail (2.7.1)
mini_mime (>= 0.1.1)
- marcel (1.0.1)
+ marcel (1.0.2)
+ matrix (0.4.2)
method_source (1.0.0)
- mini_mime (1.1.0)
- minitest (5.14.4)
- msgpack (1.4.2)
+ mini_mime (1.1.2)
+ minitest (5.16.2)
+ msgpack (1.5.4)
multi_json (1.15.0)
- multipart-post (2.1.1)
- nio4r (2.5.7)
- nokogiri (1.11.7-x86_64-linux)
+ nio4r (2.5.8)
+ nokogiri (1.13.8-x86_64-linux)
racc (~> 1.4)
- orm_adapter (0.5.0)
- pg (1.2.3)
+ openssl (3.0.0)
+ pg (1.4.2)
pragmatic_tokenizer (3.2.0)
unicode
- public_suffix (4.0.6)
- puma (5.3.2)
+ public_suffix (4.0.7)
+ puma (5.6.4)
nio4r (~> 2.0)
- racc (1.5.2)
- rack (2.2.3)
- rack-mini-profiler (2.3.2)
+ racc (1.6.0)
+ rack (2.2.4)
+ rack-cors (1.1.1)
+ rack (>= 2.0.0)
+ rack-mini-profiler (2.3.4)
rack (>= 1.2.0)
- rack-proxy (0.7.0)
+ rack-proxy (0.7.2)
rack
- rack-test (1.1.0)
- rack (>= 1.0, < 3)
- rails (6.1.4)
- actioncable (= 6.1.4)
- actionmailbox (= 6.1.4)
- actionmailer (= 6.1.4)
- actionpack (= 6.1.4)
- actiontext (= 6.1.4)
- actionview (= 6.1.4)
- activejob (= 6.1.4)
- activemodel (= 6.1.4)
- activerecord (= 6.1.4)
- activestorage (= 6.1.4)
- activesupport (= 6.1.4)
+ rack-test (2.0.2)
+ rack (>= 1.3)
+ rails (6.1.6.1)
+ actioncable (= 6.1.6.1)
+ actionmailbox (= 6.1.6.1)
+ actionmailer (= 6.1.6.1)
+ actionpack (= 6.1.6.1)
+ actiontext (= 6.1.6.1)
+ actionview (= 6.1.6.1)
+ activejob (= 6.1.6.1)
+ activemodel (= 6.1.6.1)
+ activerecord (= 6.1.6.1)
+ activestorage (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
bundler (>= 1.15.0)
- railties (= 6.1.4)
+ railties (= 6.1.6.1)
sprockets-rails (>= 2.0.0)
rails-dom-testing (2.0.3)
activesupport (>= 4.2.0)
nokogiri (>= 1.6)
- rails-html-sanitizer (1.3.0)
+ rails-html-sanitizer (1.4.3)
loofah (~> 2.3)
- railties (6.1.4)
- actionpack (= 6.1.4)
- activesupport (= 6.1.4)
+ railties (6.1.6.1)
+ actionpack (= 6.1.6.1)
+ activesupport (= 6.1.6.1)
method_source
- rake (>= 0.13)
+ rake (>= 12.2)
thor (~> 1.0)
rake (13.0.6)
- rb-fsevent (0.11.0)
+ rb-fsevent (0.11.1)
rb-inotify (0.10.1)
ffi (~> 1.0)
- redis (4.4.0)
- regexp_parser (2.1.1)
- responders (3.0.1)
- actionpack (>= 5.0)
- railties (>= 5.0)
- rsolr (2.3.0)
+ redis (4.7.1)
+ regexp_parser (2.5.0)
+ rexml (3.2.5)
+ rsolr (2.5.0)
builder (>= 2.1.2)
- faraday (>= 0.9.0)
- ruby2_keywords (0.0.4)
+ faraday (>= 0.9, < 3, != 2.0.0)
+ ruby2_keywords (0.0.5)
rubyzip (2.3.2)
sass-rails (6.0.0)
sassc-rails (~> 2.1, >= 2.1.1)
@@ -197,52 +183,53 @@ GEM
sprockets (> 3.0)
sprockets-rails
tilt
- selenium-webdriver (3.142.7)
- childprocess (>= 0.5, < 4.0)
- rubyzip (>= 1.2.2)
+ selenium-webdriver (4.3.0)
+ childprocess (>= 0.5, < 5.0)
+ rexml (~> 3.2, >= 3.2.5)
+ rubyzip (>= 1.2.2, < 3.0)
+ websocket (~> 1.0)
semantic_range (3.0.0)
- sidekiq (6.2.2)
+ sidekiq (6.5.1)
connection_pool (>= 2.2.2)
rack (~> 2.0)
redis (>= 4.2.0)
- spring (2.1.1)
- sprockets (4.0.2)
+ spring (4.0.0)
+ sprockets (4.1.1)
concurrent-ruby (~> 1.0)
rack (> 1, < 3)
- sprockets-rails (3.2.2)
- actionpack (>= 4.0)
- activesupport (>= 4.0)
+ sprockets-rails (3.4.2)
+ actionpack (>= 5.2)
+ activesupport (>= 5.2)
sprockets (>= 3.0.0)
- thor (1.1.0)
- tilt (2.0.10)
+ thor (1.2.1)
+ tilt (2.0.11)
turbolinks (5.2.1)
turbolinks-source (~> 5.2)
turbolinks-source (5.2.0)
- tzinfo (2.0.4)
+ tzinfo (2.0.5)
concurrent-ruby (~> 1.0)
unicode (0.4.4.4)
- warden (1.2.9)
- rack (>= 2.0.9)
- web-console (4.1.0)
+ web-console (4.2.0)
actionview (>= 6.0.0)
activemodel (>= 6.0.0)
bindex (>= 0.4.0)
railties (>= 6.0.0)
- webdrivers (4.6.0)
+ webdrivers (5.0.0)
nokogiri (~> 1.6)
rubyzip (>= 1.3.0)
- selenium-webdriver (>= 3.0, < 4.0)
- webpacker (5.4.0)
+ selenium-webdriver (~> 4.0)
+ webpacker (5.4.3)
activesupport (>= 5.2)
rack-proxy (>= 0.6.1)
railties (>= 5.2)
semantic_range (>= 2.3.0)
+ websocket (1.2.9)
websocket-driver (0.7.5)
websocket-extensions (>= 0.1.0)
websocket-extensions (0.1.5)
xpath (3.2.0)
nokogiri (~> 1.8)
- zeitwerk (2.4.2)
+ zeitwerk (2.6.0)
PLATFORMS
x86_64-linux
@@ -253,12 +240,13 @@ DEPENDENCIES
brakeman
byebug
capybara (>= 3.26)
- devise
jbuilder (~> 2.7)
listen (~> 3.3)
+ openssl
pg
pragmatic_tokenizer
puma (~> 5.0)
+ rack-cors
rack-mini-profiler (~> 2.0)
rails (~> 6.1.3, >= 6.1.3.2)
rsolr
@@ -270,7 +258,7 @@ DEPENDENCIES
tzinfo-data
web-console (>= 4.1.0)
webdrivers
- webpacker (~> 5.0)
+ webpacker
RUBY VERSION
ruby 3.0.1p64
diff --git a/app/channels/application_cable/connection.rb b/app/channels/application_cable/connection.rb
index af25058ad707f8107aa72d6a5983c06975e3ef4e..63e13b8341b3e383c66f183c2012e90615e4ff25 100644
--- a/app/channels/application_cable/connection.rb
+++ b/app/channels/application_cable/connection.rb
@@ -1,21 +1,19 @@
module ApplicationCable
- ##
- # This class identifies the current user in a websocket communication using ApplicationCable
+ ##
+ # This class identifies the current user in a websocket communication using ApplicationCable
class Connection < ActionCable::Connection::Base
- identified_by :current_user
+ identified_by :current_user
- def connect
- self.current_user = find_user
- end
+ def connect
+ self.current_user = find_user
+ end
- def find_user
- user_id = cookies.signed["user.id"]
- current_user = User.find_by(id: user_id)
- if current_user
- current_user
- else
- reject_unauthorized_connection
- end
+ def find_user
+      if current_user = User.find_by(id: cookies.encrypted['_web_session']&.dig('current_user_id'))
+ current_user
+ else
+ reject_unauthorized_connection
end
+ end
end
end
diff --git a/app/controllers/application_controller.rb b/app/controllers/application_controller.rb
index 45c45f8c73705936b175dbeddf1872a2d524b227..6b85949e46779666c8207707ef622e6a7c1b46c8 100644
--- a/app/controllers/application_controller.rb
+++ b/app/controllers/application_controller.rb
@@ -1,9 +1,10 @@
class ApplicationController < ActionController::Base
+ include Authentication
+ include AddProxyRequestOrigin
- def send_file
- File.open("tmp/#{params[:filename]}", 'r') do |f|
- send_data f.read, type: "text/json", filename: params[:filename]
- end
+ def send_file
+ File.open("tmp/#{params[:filename]}", "r") do |f|
+ send_data f.read, type: "text/json", filename: params[:filename]
end
-
+ end
end
diff --git a/app/controllers/catalog_controller.rb b/app/controllers/catalog_controller.rb
index 2b7a4aadb88fc1af252e7cab37f6b438f9067e0f..2d8f5150458ad9e877bd6f069e7cf7ba6d0a7cf6 100644
--- a/app/controllers/catalog_controller.rb
+++ b/app/controllers/catalog_controller.rb
@@ -1,220 +1,222 @@
class CatalogController < ApplicationController
+ before_action :authenticate_user!, :strip_input_fields
- before_action :authenticate_user!, :strip_input_fields
+ def home
+ end
- def home
-
- end
-
- ##
- # Creates a search query and submit it to the index. Retrieve and displays results + metadata.
- def index
- if params[:q]
- @search_type = params[:search_type].nil? ? "exact" : params[:search_type]
- @solr_params = SolrQuery.new(@search_type).to_params
- @solr_params[:q] = params[:q]
- @solr_params[:rows] = params[:per_page] if params[:per_page]
- @current_page = params[:page].to_i != 0 ? params[:page].to_i : 1
- @solr_params[:start] = params[:page].to_i != 0 ? @solr_params[:rows] * (params[:page].to_i-1) : 0
- @solr_params[:sort] = params[:sort] if params[:sort]
- if params[:f]
- params[:f].each do |k,v|
- if k == "date_created_dtsi" # v is a hash {to: "", from: ""}
- @solr_params[:fq] << "#{k}:[#{v['from']}T00:00:00Z TO #{v['to']}T00:00:00Z]"
- else
- if v.is_a? Array
- v.each do |val|
- @solr_params[:fq] << "#{k}:#{val}"
- end
- end
- end
- end
+ ##
+ # Creates a search query and submit it to the index. Retrieve and displays results + metadata.
+ def index
+ if params[:q]
+ @search_type = params[:search_type].nil? ? "exact" : params[:search_type]
+ @solr_params = SolrQuery.new(@search_type).to_params
+ @solr_params[:q] = params[:q]
+ @solr_params[:rows] = params[:per_page] if params[:per_page]
+ @current_page = params[:page].to_i != 0 ? params[:page].to_i : 1
+ @solr_params[:start] = params[:page].to_i != 0 ? @solr_params[:rows] * (params[:page].to_i - 1) : 0
+ @solr_params[:sort] = params[:sort] if params[:sort]
+ if params[:f]
+ params[:f].each do |k, v|
+ if k == "date_created_dtsi" # v is a hash {to: "", from: ""}
+ @solr_params[:fq] << "#{k}:[#{v["from"]}T00:00:00Z TO #{v["to"]}T00:00:00Z]"
+ else
+ if v.is_a? Array
+ v.each do |val|
+ @solr_params[:fq] << "#{k}:#{val}"
+ end
end
- session['search_params'] = @solr_params
- session['query_params'] = params.to_unsafe_h.slice('q', 'page', 'per_page','sort', 'f')
- @results = SolrSearcher.query @solr_params
- puts @results.to_json if Rails.env == "development"
- @resulting_docs = @results['response']['docs'].map do |solr_doc|
- case solr_doc['has_model_ssim']
- when ['Article']
- Article.from_solr_doc solr_doc
- when ['Issue']
- Issue.from_solr_doc solr_doc
- end
- end
- entities_fields = I18n.t("newspapers.solr_fields").values_at(:persons, :locations, :organisations, :human_productions)
- @entities_labels = []
- entities_fields.each do |entity_field|
- (@entities_labels << @results['facets'][entity_field]['buckets'].map{|ne| ne['val']}).flatten! if @results['facets'][entity_field]
- end
- @entities_labels = helpers.get_entity_label @entities_labels
+ end
+ end
+ end
+ session["search_params"] = @solr_params
+ session["query_params"] = params.to_unsafe_h.slice("q", "page", "per_page", "sort", "f")
+ @results = SolrSearcher.query @solr_params
+ # puts @results.to_json if Rails.env == "development"
+ @resulting_docs = @results["response"]["docs"].map do |solr_doc|
+ case solr_doc["has_model_ssim"]
+ when ["Article"]
+ Article.from_solr_doc solr_doc
+ when ["Issue"]
+ Issue.from_solr_doc solr_doc
end
+ end
+ entities_fields = I18n.t("newspapers.solr_fields").values_at(:persons, :locations, :organisations, :human_productions)
+ @entities_labels = []
+ entities_fields.each do |entity_field|
+ (@entities_labels << @results["facets"][entity_field]["buckets"].map { |ne| ne["val"] }).flatten! if @results["facets"][entity_field]
+ end
+ @entities_labels = helpers.get_entity_label @entities_labels
end
+ end
- ##
- # Display an issue
- def show
- @issue = Issue.from_solr params[:id], with_pages=true, with_articles=true
- session['named_entities'] = Issue.named_entities @issue.id
- session['named_entities_labels'] = helpers.get_linked_entities session['named_entities'].map{ |k,v| v.keys }.flatten.uniq
- end
+ ##
+ # Display an issue
+ def show
+    @issue = Issue.from_solr params[:id], with_pages = true, with_articles = true # NOTE(review): these are throwaway local assignments passed positionally, not keyword args
+ end
- ##
- # Retrieve named entities for a list of documents (issue and/or articles)
- def named_entities_for_docs
- named_entities = {LOC: {}, PER: {}, ORG: {}, HumanProd: {}}
- params[:docs_ids].each do |doc_id|
- if doc_id.index('_article_').nil?
- doc_named_entities = session['named_entities']
- else # if article, filter stored list
- doc_named_entities = session['named_entities'].map{ |ne_type, ne_list|
- [ne_type,ne_list.select{ |linked_id, namedentities|
- namedentities.any?{ |ne|
- ne['article_id_ssi'] == doc_id
- }
- }.map{ |k,v| [k,v.select{ |ne| ne['article_id_ssi'] == doc_id }] }.to_h]
- }.to_h
- end
- named_entities[:LOC] = named_entities[:LOC].merge(doc_named_entities[:LOC]) do |key,oldval,newval|
- oldval.concat newval
- end
- named_entities[:ORG] = named_entities[:ORG].merge(doc_named_entities[:ORG]) do |key,oldval,newval|
- oldval.concat newval
- end
- named_entities[:PER] = named_entities[:PER].merge(doc_named_entities[:PER]) do |key,oldval,newval|
- oldval.concat newval
- end
- named_entities[:HumanProd] = named_entities[:HumanProd].merge(doc_named_entities[:HumanProd]) do |key,oldval,newval|
- oldval.concat newval
- end
- end
- render partial: 'named_entities/named_entities', locals: {named_entities: named_entities, linked_entities: session['named_entities_labels']}
+ ##
+ # Retrieve named entities for a list of documents (issue and/or articles)
+ def named_entities_for_docs
+ named_entities = { LOC: {}, PER: {}, ORG: {}, HumanProd: {} }
+ linked_entities = {}
+ params[:docs_ids].each do |doc_id|
+ if doc_id.index("_article_").nil? # if issue, get named entities from Issue
+ doc_named_entities = Issue.named_entities doc_id
+ else # if article, filter stored list
+ issue_id = doc_id.split(/_article_/, 2).first
+ doc_named_entities = Issue.named_entities issue_id
+ doc_named_entities = doc_named_entities.map { |ne_type, ne_list|
+ [ne_type, ne_list.select { |linked_id, namedentities|
+ namedentities.any? { |ne|
+ ne["article_id_ssi"] == doc_id
+ }
+ }.map { |k, v| [k, v.select { |ne| ne["article_id_ssi"] == doc_id }] }.to_h]
+ }.to_h
+ end
+ named_entities[:LOC] = named_entities[:LOC].merge(doc_named_entities[:LOC]) do |key, oldval, newval|
+ oldval.concat newval
+ end
+ named_entities[:ORG] = named_entities[:ORG].merge(doc_named_entities[:ORG]) do |key, oldval, newval|
+ oldval.concat newval
+ end
+ named_entities[:PER] = named_entities[:PER].merge(doc_named_entities[:PER]) do |key, oldval, newval|
+ oldval.concat newval
+ end
+ named_entities[:HumanProd] = named_entities[:HumanProd].merge(doc_named_entities[:HumanProd]) do |key, oldval, newval|
+ oldval.concat newval
+ end
+ doc_named_entities_labels = helpers.get_linked_entities doc_named_entities.map { |k, v| v.keys }.flatten.uniq
+ linked_entities.merge!(doc_named_entities_labels)
end
+ render partial: "named_entities/named_entities", locals: { named_entities: named_entities, linked_entities: linked_entities }
+ end
- ##
- # Retrieve named entities for a dataset
- def named_entities_for_dataset
- dataset = Dataset.find(params[:dataset_id])
- named_entities = dataset.named_entities
- named_entities_labels = helpers.get_linked_entities named_entities.map{ |k,v| v.keys }.flatten.uniq
- render partial: 'named_entities/named_entities', locals: {named_entities: named_entities, linked_entities: named_entities_labels}
- end
+ ##
+ # Retrieve named entities for a dataset
+ def named_entities_for_dataset
+ dataset = Dataset.find(params[:dataset_id])
+ named_entities = dataset.named_entities
+ named_entities_labels = helpers.get_linked_entities named_entities.map { |k, v| v.keys }.flatten.uniq
+ render partial: "named_entities/named_entities", locals: { named_entities: named_entities, linked_entities: named_entities_labels }
+ end
- ##
- # Retrieve and display paginated facets
- def paginate_facets
- out = {}
- if params[:field_name] != ""
- search_params = session['search_params']
- search_params['rows'] = 0
- search_params['json.facet'] = {"#{params[:field_name]}": {terms: {
- field: params[:field_name],
- limit: 15,
- numBuckets: true,
- offset: (params[:current_page].to_i-1) * 15}}}.to_json
- res = SolrSearcher.query search_params
- entities_labels = [res['facets'][params[:field_name]]['buckets'].map{|ne| ne['val']}]
- entities_labels = helpers.get_entity_label entities_labels
- facet_constraints = search_params['fq'].select { |fq| fq.split(':')[0] == params[:field_name] }.map{|fq| {label: params[:field_name], value: fq.split(':')[1]} }
- out[:facets_entries] = []
- res['facets'][params[:field_name]]['buckets'].each do |facet_entry|
- out[:facets_entries] << render_to_string(layout: false, partial: "facet_entry", locals: {
- entities_labels: entities_labels,
- facet_constraints: facet_constraints,
- field: params[:field_name],
- facet: facet_entry,
- index: params[:current_page].to_i,
- per_page: 15
- })
- end
- end
- out[:pagination] = render_to_string(layout: false, partial: 'facet_pagination', locals: {nb_pages: params[:nb_pages].to_i, current_page: params[:current_page].to_i})
- render json: out
+ ##
+ # Retrieve and display paginated facets
+ def paginate_facets
+ out = {}
+ if params[:field_name] != ""
+ search_params = session["search_params"]
+ search_params["rows"] = 0
+ search_params["json.facet"] = { "#{params[:field_name]}": { terms: {
+ field: params[:field_name],
+ limit: 15,
+ numBuckets: true,
+ offset: (params[:current_page].to_i - 1) * 15,
+ } } }.to_json
+ res = SolrSearcher.query search_params
+ entities_labels = [res["facets"][params[:field_name]]["buckets"].map { |ne| ne["val"] }]
+ entities_labels = helpers.get_entity_label entities_labels
+ facet_constraints = search_params["fq"].select { |fq| fq.split(":")[0] == params[:field_name] }.map { |fq| { label: params[:field_name], value: fq.split(":")[1] } }
+ out[:facets_entries] = []
+ res["facets"][params[:field_name]]["buckets"].each do |facet_entry|
+ out[:facets_entries] << render_to_string(layout: false, partial: "facet_entry", locals: {
+ entities_labels: entities_labels,
+ facet_constraints: facet_constraints,
+ field: params[:field_name],
+ facet: facet_entry,
+ index: params[:current_page].to_i,
+ per_page: 15,
+ })
+ end
end
+ out[:pagination] = render_to_string(layout: false, partial: "facet_pagination", locals: { nb_pages: params[:nb_pages].to_i, current_page: params[:current_page].to_i })
+ render json: out
+ end
- ##
- # Open modal for date frequencies histogram in wide format
- def wide_dates_histogram
- out = {}
- out[:modal_content] = render_to_string(layout: false, partial: "wide_dates_histogram")
- render json: out
- end
+ ##
+ # Open modal for date frequencies histogram in wide format
+ def wide_dates_histogram
+ out = {}
+ out[:modal_content] = render_to_string(layout: false, partial: "wide_dates_histogram")
+ render json: out
+ end
- ##
- # Open Modal to confirm the creation of a compound article
- def confirm_compound_creation
- out = {}
- out[:modal_content] = render_to_string(layout: false, partial: "confirm_compound_creation", locals: {article_parts: params[:article_parts]})
- render json: out
- end
+ ##
+ # Open Modal to confirm the creation of a compound article
+ def confirm_compound_creation
+ out = {}
+ out[:modal_content] = render_to_string(layout: false, partial: "confirm_compound_creation", locals: { article_parts: params[:article_parts] })
+ render json: out
+ end
- ##
- # Create a new compound article
- def create_compound
- compound = CompoundArticle.new
- compound.user = current_user
- compound.title = params[:title]
- compound.issue_id = params[:issue_id]
- issue = Issue.from_solr params[:issue_id]
- compound.newspaper = issue.newspaper
- compound.date_created = issue.date_created
- compound.thumbnail_url = issue.thumbnail_url
- compound.language = issue.language
- compound.all_text = params[:all_text]
- compound.parts = params[:article_parts_ids]
- begin
- compound.save!
- render json: {status: 'ok', html: render_to_string(layout: false, partial: "compound_articles_panel", locals: {issue_id: params[:issue_id]})}
- rescue ActiveRecord::RecordNotUnique
- render json: {status: "error", message: "A compound article with this title already exists."}
- rescue ActiveRecord::RecordInvalid
- render json: {status: "error", message: "The title should not be blank."}
- end
+ ##
+ # Create a new compound article
+ def create_compound
+ compound = CompoundArticle.new
+ compound.user = current_user
+ compound.title = params[:title]
+ compound.issue_id = params[:issue_id]
+ issue = Issue.from_solr params[:issue_id]
+ compound.newspaper = issue.newspaper
+ compound.date_created = issue.date_created
+ compound.thumbnail_url = issue.thumbnail_url
+ compound.language = issue.language
+ compound.all_text = params[:all_text]
+ compound.parts = params[:article_parts_ids]
+ begin
+ compound.save!
+ render json: { status: "ok", html: render_to_string(layout: false, partial: "compound_articles_panel", locals: { issue_id: params[:issue_id] }) }
+ rescue ActiveRecord::RecordNotUnique
+ render json: { status: "error", message: "A compound article with this title already exists." }
+ rescue ActiveRecord::RecordInvalid
+ render json: { status: "error", message: "The title should not be blank." }
end
+ end
- ##
- # Delete an existing compound
- def delete_compound
- compound = CompoundArticle.find(params[:compound_id])
- issue_id = compound.issue_id
- current_user.datasets.each do |dataset|
- if dataset.documents.any?{|doc| doc['id'].to_s == compound.id.to_s}
- dataset.documents = dataset.documents.select{|doc| doc['id'].to_s != compound.id.to_s}
- dataset.save!
- end
- end
- compound.destroy
- out = {}
- out[:html] = render_to_string(layout: false, partial: "compound_articles_panel", locals: {issue_id: issue_id})
- out[:datasets] = render_to_string(layout: false, partial: "manage_datasets_content_show_page")
- render json: out
+ ##
+ # Delete an existing compound
+ def delete_compound
+ compound = CompoundArticle.find(params[:compound_id])
+ issue_id = compound.issue_id
+ current_user.datasets.each do |dataset|
+ if dataset.documents.any? { |doc| doc["id"].to_s == compound.id.to_s }
+ dataset.documents = dataset.documents.select { |doc| doc["id"].to_s != compound.id.to_s }
+ dataset.save!
+ end
end
+ compound.destroy
+ out = {}
+ out[:html] = render_to_string(layout: false, partial: "compound_articles_panel", locals: { issue_id: issue_id })
+ out[:datasets] = render_to_string(layout: false, partial: "manage_datasets_content_show_page")
+ render json: out
+ end
- ##
- # Retrieve and display a random sample of the result of a search
- def random_sample
- search_params = session['search_params'].with_indifferent_access
- search_params[:fq] = search_params[:fq].select {|elt| !elt.start_with? "has_model_ssim:" } if search_params[:fq]
- search_params[:fq] ||= []
- search_params[:fq] << "has_model_ssim:Article"
- search_params[:sort] = "rand#{(0...8).map { (65 + rand(26)).chr }.join} asc"
- results = SolrSearcher.query search_params
- results = results['response']['docs'].map do |solr_doc|
- case solr_doc['has_model_ssim']
- when ['Article']
- Article.from_solr_doc solr_doc
- when ['Issue']
- Issue.from_solr_doc solr_doc
- end
- end
- render json: {content: render_to_string(layout: false, partial: "random_sample", locals: {resulting_docs: results}) }
+ ##
+ # Retrieve and display a random sample of the result of a search
+ def random_sample
+ search_params = session["search_params"].with_indifferent_access
+ search_params[:fq] = search_params[:fq].select { |elt| !elt.start_with? "has_model_ssim:" } if search_params[:fq]
+ search_params[:fq] ||= []
+ search_params[:fq] << "has_model_ssim:Article"
+ search_params[:sort] = "rand#{(0...8).map { (65 + rand(26)).chr }.join} asc"
+ results = SolrSearcher.query search_params
+ results = results["response"]["docs"].map do |solr_doc|
+ case solr_doc["has_model_ssim"]
+ when ["Article"]
+ Article.from_solr_doc solr_doc
+ when ["Issue"]
+ Issue.from_solr_doc solr_doc
+ end
end
+ render json: { content: render_to_string(layout: false, partial: "random_sample", locals: { resulting_docs: results }) }
+ end
- private
+ private
- def strip_input_fields
- params.each do |key, value|
- params[key] = value.strip if value.respond_to?("strip")
- end
+ def strip_input_fields
+ params.each do |key, value|
+ params[key] = value.strip if value.respond_to?("strip")
end
+ end
end
diff --git a/app/controllers/concerns/authentication.rb b/app/controllers/concerns/authentication.rb
new file mode 100644
index 0000000000000000000000000000000000000000..fe691314b73e28ce9e57e450531c33a2095f91ab
--- /dev/null
+++ b/app/controllers/concerns/authentication.rb
@@ -0,0 +1,78 @@
+require "openssl"
+require "base64"
+
+module Authentication
+ extend ActiveSupport::Concern
+
+ included do
+ before_action :current_user
+ helper_method :current_user
+ helper_method :decrypt_header
+ end
+
+ def authenticate_user!
+ # user_dict = { "labs_user_id" => decrypt_header(request.headers["X-Auth-Newseye-Token"])[0],
+ # "labs_user_name" => decrypt_header(request.headers["X-Auth-Newseye-Token"])[1] }
+ user_dict = { "labs_user_id" => "42", "labs_user_name" => "dummyuser" }
+ @user = User.find_by(labs_user_id: user_dict["labs_user_id"])
+ if @user
+ if !session[:current_user_id]
+ # puts "Logging in the user since current_user_id was not set"
+ login @user
+ end
+ else
+ @user = User.new(user_dict)
+ if @user.save
+ login @user
+ end
+ end
+ end
+
+ def login(user)
+ reset_session
+ session[:current_user_id] = user.id
+ # puts "The user id taken from session is " + String(session[:current_user_id])
+ end
+
+ private
+
+ def current_user
+ # Current.user ||= User.find_by(labs_user_id: decrypt_header(request.headers["X-Auth-Newseye-Token"])[0])
+ Current.user ||= User.find_by(labs_user_id: "42")
+ end
+
+ def decrypt_header(token)
+ private_key = ENV.fetch("NEP_AUTH_PRIVATE_KEY") # fail fast if unset; never fall back to a key committed in source
+ unpacked_key = Base64.urlsafe_decode64(private_key)
+ signing_key = unpacked_key[0..15]
+ encryption_key = unpacked_key[16..31]
+ begin
+ unpacked_token = Base64.urlsafe_decode64(token)
+ version = unpacked_token[0]
+ timestamp = unpacked_token[1..8]
+ iv = unpacked_token[9..24]
+ ciphertext = unpacked_token[25..-33]
+ hmac = unpacked_token[-32..-1]
+ computed_hmac = OpenSSL::HMAC.digest("SHA256", signing_key, unpacked_token[0..-33])
+ if OpenSSL.fixed_length_secure_compare(version, "\x80") &&
+ (timestamp.unpack("Q>")[0] - Time.now.to_i).abs < 3600 &&
+ OpenSSL.fixed_length_secure_compare(hmac, computed_hmac)
+ # all good
+ # puts "Token authenticated"
+ else
+ # do something now
+ raise "Invalid Token"
+ end
+ d = OpenSSL::Cipher.new("AES-128-CBC")
+ d.decrypt
+ d.key = encryption_key
+ d.iv = iv
+ plain = d.update(ciphertext) + d.final
+ token_tuple = plain.split(",", 2)
+ rescue => e
+ # do something now
+ puts e.message, e.backtrace
+ raise "Invalid Token"
+ end
+ end
+end
diff --git a/app/controllers/dataset_controller.rb b/app/controllers/dataset_controller.rb
index e04f4b4e1265ad806aea318f755e5a70647f3a1f..2478c32a563f2ee0569fe43d85661aefa40cc5d5 100644
--- a/app/controllers/dataset_controller.rb
+++ b/app/controllers/dataset_controller.rb
@@ -1,194 +1,215 @@
class DatasetController < ApplicationController
-
- before_action :authenticate_user!
-
- ##
- # List all datasets
- def index
- end
-
- ##
- # Display a single dataset
- def show
- @dataset = Dataset.find(params[:id])
- @current_page = params[:page] || 1
- @per_page = params[:per_page] || 10
- session[:working_dataset] = @dataset.id
- end
-
- ##
- # Create a new empty dataset
- def create_dataset
- dataset = Dataset.new
- dataset.user = current_user
- dataset.title = params[:title]
- begin
- dataset.save!
- render json: {status: 'ok'}
- rescue ActiveRecord::RecordNotUnique
- render json: {status: "error", message: "A dataset with this title already exists."}
- rescue ActiveRecord::RecordInvalid
- render json: {status: "error", message: "The title should not be blank."}
- end
- end
-
- ##
- # Rename an existing dataset
- def rename_dataset
- dataset = Dataset.find(params[:id])
- dataset.title = params[:title]
+ before_action :authenticate_user!
+
+ ##
+ # List all datasets
+ def index
+ # puts "Listing datasets"
+ # puts session.inspect
+ end
+
+ ##
+ # Display a single dataset
+ def show
+ # puts "Finding dataset with id " + String(params[:id])
+ @dataset = Dataset.find(params[:id])
+ @current_page = params[:page] || 1
+ @per_page = params[:per_page] || 10
+ @nb_pages = params[:nb_pages] || [(@dataset.nb_articles.to_f / @per_page.to_i).ceil, 1].max
+ session[:working_dataset] = @dataset.id
+ # puts "The session now has working_dataset " + String(session[:working_dataset])
+ end
+
+ ##
+ # Create a new empty dataset
+ def create_dataset
+ dataset = Dataset.new
+ dataset.user = current_user
+ dataset.title = params[:title]
+ begin
+ dataset.save!
+ render json: { status: "ok" }
+ rescue ActiveRecord::RecordNotUnique
+ render json: { status: "error", message: "A dataset with this title already exists." }
+ rescue ActiveRecord::RecordInvalid
+ render json: { status: "error", message: "The title should not be blank." }
+ end
+ end
+
+ ##
+ # Rename an existing dataset
+ def rename_dataset
+ dataset = Dataset.find(params[:id])
+ dataset.title = params[:title]
+ begin
+ dataset.save!
+ render json: { status: "ok" }
+ rescue ActiveRecord::RecordNotUnique
+ render json: { status: "error", message: "A dataset with this title already exists." }
+ rescue ActiveRecord::RecordInvalid
+ render json: { status: "error", message: "The title should not be blank." }
+ end
+ end
+
+ ##
+ # Import a public dataset
+ def import_dataset
+ to_copy = Dataset.find params[:original_dataset_id]
+ render(json: { status: "error", message: "This dataset is not public." }) and return unless to_copy.public?
+ new_dataset = Dataset.new
+ new_dataset.user_id = current_user.id
+ new_dataset.title = params[:title]
+ to_copy.documents.each do |doc|
+ if doc["type"] == "compound"
+ ca = CompoundArticle.find(doc["id"]).dup
+ ca.user = current_user
begin
- dataset.save!
- render json: {status: 'ok'}
+ ca.save!
rescue ActiveRecord::RecordNotUnique
- render json: {status: "error", message: "A dataset with this title already exists."}
- rescue ActiveRecord::RecordInvalid
- render json: {status: "error", message: "The title should not be blank."}
+ ca.title = "_#{(0...8).map { (65 + rand(26)).chr }.join}_#{ca.title}"
+ ca.save!
end
- end
-
- ##
- # Import a public dataset
- def import_dataset
- to_copy = Dataset.find params[:original_dataset_id]
- render json: {status: "error", message: "This dataset is not public."} unless to_copy.public?
- new_dataset = Dataset.new
- new_dataset.user_id = current_user.id
- new_dataset.title = params[:title]
- to_copy.documents.each do |doc|
- if doc['type'] == "compound"
- ca = CompoundArticle.find(doc['id']).dup
- ca.user = current_user
- begin
- ca.save!
- rescue ActiveRecord::RecordNotUnique
- ca.title = "_#{(0...8).map { (65 + rand(26)).chr }.join}_#{ca.title}"
- ca.save!
- end
- new_dataset.documents << {id: ca.id, type: "compound"}
- else
- new_dataset.documents << doc
- end
- end
- begin
- new_dataset.save!
- render json: {status: 'ok'}
- rescue ActiveRecord::RecordNotUnique
- render json: {status: "error", message: "A dataset with this title already exists."}
- rescue ActiveRecord::RecordInvalid
- render json: {status: "error", message: "The title should not be blank."}
- end
- end
-
- ##
- # Delete an existing dataset
- def delete_dataset
- dataset = Dataset.find(params[:dataset_id])
- dataset_id = dataset.id
- dataset.destroy
- if session[:working_dataset] == dataset_id
- if current_user.datasets.first
- session[:working_dataset] = current_user.datasets.first.id
- else
- session[:working_dataset] = nil
- end
- end
- end
-
- ##
- # Update the view of the list of datasets
- def update_datasets_list
- respond_to do |format|
- format.js
- end
- end
-
- ##
- #
- def set_working_dataset
- session[:working_dataset] = params[:dataset_id]
- @title = Dataset.find(session[:working_dataset]).title
- respond_to do |format|
- format.js
- end
- end
-
- def add_selected_documents
- out = {}
- @nb_added_docs = params[:documents_ids].size
- dataset = Dataset.find(session[:working_dataset])
- existing = dataset.add_documents params[:documents_ids] # Add docs and return existing ids
- @nb_added_docs -= existing.size
- title = dataset.title
- message = "
#{@nb_added_docs} document#{@nb_added_docs > 1 ? "s were" : " was"} added to your dataset.
"
- message.concat "#{existing.size} document#{existing.size > 1 ? "s" : ""} already exist in this dataset.
" unless existing.empty?
- # render partial: "shared/notification", locals: {notif_title: title, notif_content: message.html_safe}
- out['notif'] = render_to_string layout: false, partial: "shared/notification", locals: {notif_title: title, notif_content: message.html_safe}
- out['nbissues'] = dataset.documents.select{|d| d['type'] == "issue" }.size
- out['nbarticles'] = dataset.documents.select{|d| d['type'] == "article" }.size
- out['nbdocs'] = out['nbissues'] + out['nbarticles']
- out['title'] = title
- out['results_datasets'] = params[:documents_ids].map{ |docid| [docid, render_to_string(layout: false, partial: 'catalog/result_datasets', locals: {doc_id: docid})] }.to_h
- render json: out
- end
-
- def add_compound
- out = {}
- dataset = Dataset.find(session[:working_dataset])
- existing = dataset.add_compound params[:compound_id] # Add docs and return existing ids
- title = dataset.title
- message = " The compound article was added to your dataset.
"
- out['notif'] = render_to_string layout: false, partial: "shared/notification", locals: {notif_title: title, notif_content: message.html_safe}
- out['nbissues'] = dataset.documents.select{|d| d['type'] == "issue" }.size
- out['nbarticles'] = dataset.documents.select{|d| d['type'] == "article" }.size
- out['nbcompounds'] = dataset.documents.select{|d| d['type'] == "compound" }.size
- out['nbdocs'] = out['nbissues'] + out['nbarticles'] + out['nbcompounds']
- out['title'] = title
- render json: out
- end
-
- def remove_selected_documents
- @nb_removed_docs = params[:documents_ids].size
- dataset = Dataset.find(session[:working_dataset])
- dataset.remove_documents params[:documents_ids]
- redirect_to action: "show", id: dataset.id
- end
-
- def add_all_documents
- SearchToDatasetWorker.perform_async(current_user.id, session[:working_dataset], params[:search_params].to_unsafe_h)
- title = Dataset.find(session[:working_dataset]).title
- message = "Documents are being added to your dataset. You will be notified when the operation is done.
"
- render partial: "shared/notification", locals: {notif_title: title, notif_content: message.html_safe}
- end
-
- def export_dataset
- ExportDatasetWorker.perform_async(current_user.id, params[:dataset_id], params[:export_type])
- title = Dataset.find(params[:dataset_id]).title
- message = "The export is being prepared. You will be notified when the operation is done.
"
- render partial: "shared/notification", locals: {notif_title: title, notif_content: message.html_safe}
- end
-
- def toggle_sharing_status
- @dataset = Dataset.find(params[:dataset_id])
- @dataset.toggle!(:public)
- render partial: 'dataset_info'
- end
-
- def paginate
- out = {}
- d = Dataset.find params['id']
- rows = params[:per_page].to_i
- res = d.fetch_paginated_documents(params[:page].to_i, rows, params[:sort], params[:sort_order], params[:type])
- out[:documents] = render_to_string(layout: false,
- partial: "documents",
- locals: {docs: res[:docs], rows: rows, pagenum: params[:page].to_i})
- out[:pagination] = render_to_string(layout: false,
- partial: "pagination",
- locals: {nb_pages: res[:nb_pages].to_i, current_page: params[:page].to_i})
- render json: out
- end
-
- def list_datasets
- render json: current_user.datasets.to_json
- end
+ new_dataset.documents << { id: ca.id, type: "compound" }
+ else
+ new_dataset.documents << doc
+ end
+ end
+ begin
+ new_dataset.save!
+ render json: { status: "ok" }
+ rescue ActiveRecord::RecordNotUnique
+ render json: { status: "error", message: "A dataset with this title already exists." }
+ rescue ActiveRecord::RecordInvalid
+ render json: { status: "error", message: "The title should not be blank." }
+ end
+ end
+
+ ##
+ # Delete an existing dataset
+ def delete_dataset
+ dataset = Dataset.find(params[:dataset_id])
+ dataset_id = dataset.id
+ dataset.destroy
+ if session[:working_dataset] == dataset_id
+ if current_user.datasets.first
+ session[:working_dataset] = current_user.datasets.first.id
+ else
+ session[:working_dataset] = nil
+ end
+ end
+ end
+
+ ##
+ # Update the view of the list of datasets
+ def update_datasets_list
+ respond_to do |format|
+ format.js
+ end
+ end
+
+ ##
+ #
+ def set_working_dataset
+ session[:working_dataset] = params[:dataset_id]
+ @title = Dataset.find(session[:working_dataset]).title
+ # puts "Setting working dataset to " + String(session[:working_dataset])
+ # puts session.inspect
+ respond_to do |format|
+ format.js
+ end
+ end
+
+ def add_selected_documents
+ # puts session.inspect
+ # puts "User id from session is " + String(session[:current_user_id]) + " and working dataset is " + String(session[:working_dataset])
+ out = {}
+ @nb_added_docs = params[:documents_ids].size
+ dataset = Dataset.find(session[:working_dataset])
+ existing = dataset.add_documents params[:documents_ids] # Add docs and return existing ids
+ @nb_added_docs -= existing.size
+ title = dataset.title
+ message = " #{@nb_added_docs} document#{@nb_added_docs > 1 ? "s were" : " was"} added to your dataset.
"
+ message.concat "#{existing.size} document#{existing.size > 1 ? "s" : ""} already exist in this dataset.
" unless existing.empty?
+ # render partial: "shared/notification", locals: {notif_title: title, notif_content: message.html_safe}
+ out["notif"] = render_to_string layout: false, partial: "shared/notification", locals: { notif_title: title, notif_content: message.html_safe, notif_autohide: "true" }
+ out["nbissues"] = dataset.documents.select { |d| d["type"] == "issue" }.size
+ out["nbarticles"] = dataset.documents.select { |d| d["type"] == "article" }.size
+ out["nbdocs"] = out["nbissues"] + out["nbarticles"]
+ out["title"] = title
+ out["results_datasets"] = params[:documents_ids].map { |docid| [docid, render_to_string(layout: false, partial: "catalog/result_datasets", locals: { doc_id: docid })] }.to_h
+ render json: out
+ end
+
+ def add_compound
+ out = {}
+ dataset = Dataset.find(session[:working_dataset])
+ existing = dataset.add_compound params[:compound_id] # Add docs and return existing ids
+ title = dataset.title
+ message = " The compound article was added to your dataset.
"
+ out["notif"] = render_to_string layout: false, partial: "shared/notification", locals: { notif_title: title, notif_content: message.html_safe, notif_autohide: "true" }
+ out["nbissues"] = dataset.documents.select { |d| d["type"] == "issue" }.size
+ out["nbarticles"] = dataset.documents.select { |d| d["type"] == "article" }.size
+ out["nbcompounds"] = dataset.documents.select { |d| d["type"] == "compound" }.size
+ out["nbdocs"] = out["nbissues"] + out["nbarticles"] + out["nbcompounds"]
+ out["title"] = title
+ render json: out
+ end
+
+ def remove_selected_documents
+ @nb_removed_docs = params[:documents_ids].size
+ dataset = Dataset.find(session[:working_dataset])
+ dataset.remove_documents params[:documents_ids]
+ respond_to do |format|
+ format.js { render inline: "location.reload();" } # https://stackoverflow.com/questions/7465259/how-can-i-reload-the-current-page-in-ruby-on-rails
+ end
+ end
+
+ def add_all_documents
+ time = Time.now.to_i
+ SearchToDatasetWorker.perform_async(current_user.id, session[:working_dataset], params[:search_params].to_unsafe_h, time)
+ title = Dataset.find(session[:working_dataset]).title
+ message = "Documents are being added to your dataset.
"
+ render partial: "shared/notification", locals: { notif_title: title, notif_content: message.html_safe, notif_autohide: "false" }
+ end
+
+ def export_dataset
+ time = Time.now.to_i
+ ExportDatasetWorker.perform_async(current_user.id, params[:dataset_id], params[:export_type], time)
+ title = Dataset.find(params[:dataset_id]).title
+ message = "The export is being prepared.
"
+ render partial: "shared/notification", locals: { notif_title: title, notif_content: message.html_safe, notif_autohide: "false" }
+ end
+
+ def toggle_sharing_status
+ @dataset = Dataset.find(params[:dataset_id])
+ @dataset.toggle!(:public)
+ render partial: "dataset_info"
+ end
+
+ def paginate
+ out = {}
+ d = Dataset.find params["id"]
+ rows = params[:per_page].to_i
+ res = d.fetch_paginated_documents(params[:page].to_i, rows, params[:sort], params[:sort_order], params[:type])
+ out[:documents] = render_to_string(layout: false,
+ partial: "documents",
+ locals: { docs: res[:docs], rows: rows, pagenum: params[:page].to_i })
+ out[:pagination] = render_to_string(layout: false,
+ partial: "pagination",
+ locals: { nb_pages: res[:nb_pages].to_i, current_page: params[:page].to_i })
+ out[:nb_pages] = res[:nb_pages]
+ render json: out
+ end
+
+ def list_datasets
+ render json: current_user.datasets.to_json
+ end
end
diff --git a/app/controllers/experiment_controller.rb b/app/controllers/experiment_controller.rb
index 5c89c48070c3dc380564f8752cb3399d970b4618..61d9bce3e41c0894c275c4b3bebd612b2fe9b1eb 100644
--- a/app/controllers/experiment_controller.rb
+++ b/app/controllers/experiment_controller.rb
@@ -1,118 +1,117 @@
class ExperimentController < ApplicationController
+ before_action :authenticate_user!
- before_action :authenticate_user!
+ def index
+ end
- def index
+ def create
+ experiment = Experiment.new
+ experiment.user = current_user
+ experiment.title = params[:title]
+ begin
+ experiment.save!
+ render json: { status: "ok" }
+ rescue ActiveRecord::RecordNotUnique
+ render json: { status: "error", message: "An experiment with this title already exists." }
+ rescue ActiveRecord::RecordInvalid
+ render json: { status: "error", message: "The title should not be blank." }
end
+ end
- def create
- experiment = Experiment.new
- experiment.user = current_user
- experiment.title = params[:title]
- begin
- experiment.save!
- render json: {status: 'ok'}
- rescue ActiveRecord::RecordNotUnique
- render json: {status: "error", message: "An experiment with this title already exists."}
- rescue ActiveRecord::RecordInvalid
- render json: {status: "error", message: "The title should not be blank."}
- end
+ def delete
+ experiment = Experiment.find(params[:experiment_id])
+ root_ids = experiment.description["children"].map { |root| root["tool"]["id"] }
+ root_ids.each do |root_id|
+ Tool.destroy(experiment.delete_tool(root_id))
end
+ experiment.destroy
+ end
- def delete
- experiment = Experiment.find(params[:experiment_id])
- root_ids = experiment.description["children"].map{|root| root['tool']['id'] }
- root_ids.each do |root_id|
- Tool.destroy(experiment.delete_tool(root_id))
- end
- experiment.destroy
- end
-
- def show
- @experiment = Experiment.find params[:id]
- @tools = @experiment.load_tools
- @tools = JSON.parse(File.read("#{Rails.root}/lib/newspapers_tools.json"))
- @tools['tools']['processors'].delete_if{ |h| h["type"] == "splitter" }
- end
+ def show
+ @experiment = Experiment.find params[:id]
+ @tools = @experiment.load_tools
+ @tools = JSON.parse(File.read("#{Rails.root}/lib/newspapers_tools.json"))
+ @tools["tools"]["processors"].delete_if { |h| h["type"] == "splitter" }
+ end
- def update_experiments_list
- respond_to do |format|
- format.js
- end
+ def update_experiments_list
+ respond_to do |format|
+ format.js
end
+ end
- def add_tool
- @experiment = Experiment.find(params[:id])
- tool_params = JSON.parse params[:tool]
- tool = Tool.new
- tool.tool_type = tool_params['type']
- tool.input_type = tool_params['input_type']
- tool.output_type = tool_params['output_type']
- tool.parameters = tool_params['parameters']
- tool.status = "created"
- tool.parent_id = params[:parent_id]#(params[:parent_id] == "") ? nil : Tool.find(params[:parent_id])
- tool.experiment = @experiment
- tool.save!
- @experiment.add_tool(params[:parent_id].to_i, tool)
- @experiment.save!
- render 'experiment/update_experiment_area'
- end
+ def add_tool
+ @experiment = Experiment.find(params[:id])
+ tool_params = JSON.parse params[:tool]
+ tool = Tool.new
+ tool.tool_type = tool_params["type"]
+ tool.input_type = tool_params["input_type"]
+ tool.output_type = tool_params["output_type"]
+ tool.parameters = tool_params["parameters"]
+ tool.status = "created"
+ tool.parent_id = params[:parent_id] #(params[:parent_id] == "") ? nil : Tool.find(params[:parent_id])
+ tool.experiment = @experiment
+ tool.save!
+ @experiment.add_tool(params[:parent_id].to_i, tool)
+ @experiment.save!
+ render "experiment/update_experiment_area"
+ end
- def delete_tool
- @experiment = Experiment.find(params[:id])
- tools_to_destroy_ids = @experiment.delete_tool(params[:tool_id].to_i)
- @experiment.save!
- Tool.destroy(tools_to_destroy_ids)
- render 'experiment/update_experiment_area'
- end
+ def delete_tool
+ @experiment = Experiment.find(params[:id])
+ tools_to_destroy_ids = @experiment.delete_tool(params[:tool_id].to_i)
+ @experiment.save!
+ Tool.destroy(tools_to_destroy_ids)
+ render "experiment/update_experiment_area"
+ end
- def edit_tool_form
- @tool = Tool.find(params[:tool_id])
- render partial: 'tool/parameters', locals: {tool: @tool}
- end
+ def edit_tool_form
+ @tool = Tool.find(params[:tool_id])
+ render partial: "tool/parameters", locals: { tool: @tool }
+ end
- def edit_tool
- @experiment = Experiment.find(params[:id])
- @tool = Tool.find(params[:tool_id])
- modified = false
- @tool.parameters.map! do |param|
- if param['value'] != params[:parameters][param['name']]
- modified = true
- end
- param['value'] = params[:parameters][param['name']]
- param
- end
- @tool.status = "configured" if modified
- @tool.save!
- render 'experiment/update_experiment_area'
+ def edit_tool
+ @experiment = Experiment.find(params[:id])
+ @tool = Tool.find(params[:tool_id])
+ modified = false
+ @tool.parameters.map! do |param|
+ if param["value"] != params[:parameters][param["name"]]
+ modified = true
+ end
+ param["value"] = params[:parameters][param["name"]]
+ param
end
+ @tool.status = "configured" if modified
+ @tool.save!
+ render "experiment/update_experiment_area"
+ end
- def tool_results
- @experiment = Experiment.find(params[:id])
- @tool = Tool.find(params[:tool_id])
- render partial: 'tool/results', locals: {tool: @tool, experiment: @experiment}
- end
+ def tool_results
+ @experiment = Experiment.find(params[:id])
+ @tool = Tool.find(params[:tool_id])
+ render partial: "tool/results", locals: { tool: @tool, experiment: @experiment }
+ end
- def run_tool
- @experiment = Experiment.find(params[:id])
- @tool = Tool.find(params[:tool_id])
- @tool.run()
- render 'experiment/update_experiment_area'
- end
+ def run_tool
+ @experiment = Experiment.find(params[:id])
+ @tool = Tool.find(params[:tool_id])
+ @tool.run()
+ render "experiment/update_experiment_area"
+ end
- def run_experiment
- out = {}
- @experiment = Experiment.find(params[:experiment_id])
- ids = @experiment.get_tool_ids
- running = false
- ids.map{|id| Tool.find(id)}.each do |tool|
- if tool.runnable?
- tool.run(true)
- running = true
- end
- end
- out[:html_tree] = render_to_string partial: "tree", locals: {experiment: @experiment}
- out[:experiment_running] = running
- render json: out
+ def run_experiment
+ out = {}
+ @experiment = Experiment.find(params[:experiment_id])
+ ids = @experiment.get_tool_ids
+ running = false
+ ids.map { |id| Tool.find(id) }.each do |tool|
+ if tool.runnable?
+ tool.run(true)
+ running = true
+ end
end
+ out[:html_tree] = render_to_string partial: "tree", locals: { experiment: @experiment }
+ out[:experiment_running] = running
+ render json: out
+ end
end
diff --git a/app/controllers/notification_controller.rb b/app/controllers/notification_controller.rb
index 53306f026c34a52b604737965d3f45069ccf4ab3..43e7a448386440766ba963f829149080605773cf 100644
--- a/app/controllers/notification_controller.rb
+++ b/app/controllers/notification_controller.rb
@@ -1,3 +1,3 @@
class NotificationController < ApplicationController
-
-end
\ No newline at end of file
+ before_action :authenticate_user!
+end
diff --git a/app/controllers/tool_controller.rb b/app/controllers/tool_controller.rb
index d9fbf32fe8fd70f9fdd4e0191763b1bc5e9a62ff..2e047a96589e49ddef709a411faa2b229222ecab 100644
--- a/app/controllers/tool_controller.rb
+++ b/app/controllers/tool_controller.rb
@@ -1,26 +1,21 @@
class ToolController < ApplicationController
+ before_action :authenticate_user!
- before_action :authenticate_user!
+ def show
+ end
- def show
+ def create
+ end
- end
+ def update
+ end
- def create
+ def destroy
+ end
- end
+ private
- def update
-
- end
-
- def destroy
-
- end
-
- private
-
- def tool_params
- params.require(:tool).permit(:parameters, :results, :status)
- end
+ def tool_params
+ params.require(:tool).permit(:parameters, :results, :status)
+ end
end
diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb
index a69ed066bcadd2cfc54e470d06f04d076e83561c..15f2511626a73c1182080ec88be05629588196fc 100644
--- a/app/helpers/application_helper.rb
+++ b/app/helpers/application_helper.rb
@@ -1,7 +1,5 @@
module ApplicationHelper
-
- def set_page_title(title)
- content_for :page_title, title
- end
-
+ def set_page_title(title)
+ content_for :page_title, title
+ end
end
diff --git a/app/helpers/experiment_helper.rb b/app/helpers/experiment_helper.rb
index f03f142c96df04bd436fb1db1f7329e956f801bd..9bc2e4cf8fe718cfc7a8567a48f6f3bba357c6e5 100644
--- a/app/helpers/experiment_helper.rb
+++ b/app/helpers/experiment_helper.rb
@@ -1,17 +1,15 @@
module ExperimentHelper
-
- def recursive_display(tree, tools)
- if tree.has_key? "tool"
- concat "".html_safe
- concat render partial: 'tool/canvas_tool', locals: {tool: tools[tree['tool']['id']]}
- concat "".html_safe
- end
- tree['children'].each do |node|
- recursive_display(node, tools)
- end
- concat '
'.html_safe
- concat " ".html_safe
- concat " ".html_safe if tree.has_key? "tool"
+ def recursive_display(tree, tools)
+ if tree.has_key? "tool"
+ concat "".html_safe
+ concat render partial: "tool/canvas_tool", locals: { tool: tools[tree["tool"]["id"]] }
+ concat "".html_safe
end
-
-end
\ No newline at end of file
+ tree["children"].each do |node|
+ recursive_display(node, tools)
+ end
+ concat '
'.html_safe
+ concat " ".html_safe
+ concat " ".html_safe if tree.has_key? "tool"
+ end
+end
diff --git a/app/helpers/named_entities_helper.rb b/app/helpers/named_entities_helper.rb
index 041962362c6aafc418b8e93d6ee4b621aa95267b..e9c2536ec0acb2ac3a1da76a767c531ab6b5d30c 100644
--- a/app/helpers/named_entities_helper.rb
+++ b/app/helpers/named_entities_helper.rb
@@ -1,40 +1,38 @@
module NamedEntitiesHelper
-
- def get_linked_entities entities
- priority_language = [I18n.locale, 'en', 'de', 'fr', 'fi', 'sv']
- ids = entities.select{ |label| label != "" && label != nil }
- return {} if ids.empty?
- out = {}
- SolrSearcher.query({q: "*:*", fq: "id:(#{ids.join(' ')})", fl: "*", rows: 99999})['response']['docs'].map do |res|
- priority_language.each do |lang|
- unless res["label_#{lang}_ssi"].nil?
- out[res['id']] = {kb_url: res['kb_url_ssi'], label: res["label_#{lang}_ssi"]}
- break
- end
- end
+ def get_linked_entities(entities)
+ priority_language = [I18n.locale, "en", "de", "fr", "fi", "sv"]
+ ids = entities.select { |label| label != "" && label != nil }
+ return {} if ids.empty?
+ out = {}
+ SolrSearcher.query({ q: "*:*", fq: "id:(#{ids.join(" ")})", fl: "*", rows: 99999 })["response"]["docs"].map do |res|
+ priority_language.each do |lang|
+ unless res["label_#{lang}_ssi"].nil?
+ out[res["id"]] = { kb_url: res["kb_url_ssi"], label: res["label_#{lang}_ssi"] }
+ break
end
- out
+ end
end
+ out
+ end
- def get_entity_label(options={})
- priority_language = [I18n.locale, 'en', 'de', 'fr', 'fi', 'sv']
- if options.class == Array
- out = {}
- unless options.empty?
- docs = SolrSearcher.query({q: "*:*", fq: "id:(#{options.join(' ')})", fl: "*", rows: 99999})['response']['docs']
- docs.map do |doc|
- priority_language.each do |lang|
- unless doc["label_#{lang}_ssi"].nil?
- out[doc['id']] = doc["label_#{lang}_ssi"]
- break
- end
- end
- end
+ def get_entity_label(options = {})
+ priority_language = [I18n.locale, "en", "de", "fr", "fi", "sv"]
+ if options.class == Array
+ out = {}
+ unless options.empty?
+ docs = SolrSearcher.query({ q: "*:*", fq: "id:(#{options.join(" ")})", fl: "*", rows: 99999 })["response"]["docs"]
+ docs.map do |doc|
+ priority_language.each do |lang|
+ unless doc["label_#{lang}_ssi"].nil?
+ out[doc["id"]] = doc["label_#{lang}_ssi"]
+ break
end
- return out
- else
- @entities_labels[options] # set in catalog_controller#index
+ end
end
+ end
+ return out
+ else
+ @entities_labels[options] # set in catalog_controller#index
end
-
-end
\ No newline at end of file
+ end
+end
diff --git a/app/helpers/search_helper.rb b/app/helpers/search_helper.rb
index 8660013d7826fc10e504bbde28bb13173970f449..e9eb2acf15429dc437fc7427d3f7acad673c105b 100644
--- a/app/helpers/search_helper.rb
+++ b/app/helpers/search_helper.rb
@@ -1,37 +1,35 @@
module SearchHelper
+ def current_page_params
+ params.to_unsafe_h.slice("q", "page", "per_page", "sort", "search_type", "f")
+ end
- def current_page_params
- params.to_unsafe_h.slice('q', 'page', 'per_page','sort', 'f')
+ def merge_facets(parameters, new)
+ parameters.merge(new) do |key, oldval, newval|
+ oldval.merge(newval)
end
+ end
- def merge_facets(parameters, new)
- parameters.merge(new) do |key, oldval, newval|
- oldval.merge(newval)
- end
- end
-
- def convert_solr_date_to_datepicker_date solr_date
- DateTime.parse(solr_date).strftime("%Y-%m-%d")
- end
+ def convert_solr_date_to_datepicker_date(solr_date)
+ DateTime.parse(solr_date).strftime("%Y-%m-%d")
+ end
- def convert_datepicker_date_to_solr_date solr_date
- DateTime.parse(solr_date).strftime("%Y-%m-%d")
- end
+ def convert_datepicker_date_to_solr_date(solr_date)
+ DateTime.parse(solr_date).strftime("%Y-%m-%d")
+ end
- def search_constraints
- constraints = []
- if current_page_params[:f]
- current_page_params[:f].each do |f, vals|
- if f == "date_created_dtsi"
- constraints << {label: f, value: "From #{vals['from']} To #{vals['to']}"}
- else
- vals.each do |val|
- constraints << {label: f, value: val}
- end
- end
- end
+ def search_constraints
+ constraints = []
+ if current_page_params[:f]
+ current_page_params[:f].each do |f, vals|
+ if f == "date_created_dtsi"
+ constraints << { label: f, value: "From #{vals["from"]} To #{vals["to"]}" }
+ else
+ vals.each do |val|
+ constraints << { label: f, value: val }
+ end
end
- constraints
+ end
end
-
-end
\ No newline at end of file
+ constraints
+ end
+end
diff --git a/app/javascript/channels/notification_channel.js b/app/javascript/channels/notification_channel.js
index 7d47346856862f2ad4bea953eee09ea92bf38f70..031fd2e0092a46629c652d72d6a82675c407e632 100644
--- a/app/javascript/channels/notification_channel.js
+++ b/app/javascript/channels/notification_channel.js
@@ -19,7 +19,7 @@ consumer.subscriptions.create("NotificationChannel", {
$("#experiment_area").attr("data-refresh", (!$("#experiment_area").attr("data-refresh")))
break
case "notify":
- if(window.location.pathname == "/search") {
+ if(window.location.pathname.endsWith("/search")) {
const selected_dataset = $("#working_dataset_select").val()
$("#working_dataset_select").html(data.dataset_options)
$("#working_dataset_select").val(selected_dataset)
@@ -35,12 +35,22 @@ consumer.subscriptions.create("NotificationChannel", {
}
break
case "completion_rate":
- if(window.location.pathname == `/experiment/${data.experiment_id}`) {
+ if(window.location.pathname.endsWith(`/experiment/${data.experiment_id}`)) {
const progress_bar = $(`#tool_${data.tool_id}`).find(".completion-rate").find('.progress-bar')
progress_bar.attr("style", `width: ${data.completion}%;`)
progress_bar.attr("aria-valuenow", data.completion)
progress_bar.html(`${data.completion}%`)
}
+ if(window.location.pathname.endsWith("/search") || window.location.pathname.endsWith("/dataset/".concat(data.dataset_id))) {
+ const progress_bar = $("#progress-".concat(data.dataset_id).concat(data.time)).find('.progress-bar')
+ progress_bar.attr("style", `width: ${data.completion}%;`)
+ progress_bar.attr("aria-valuenow", data.completion)
+ progress_bar.html(`${data.completion}%`)
+
+ if(data.completion == 100) {
+ progress_bar.closest(".toast").hide(2000);
+ }
+ }
break
case "experiment_finished":
// $("#experiment_status").html(data.message)
diff --git a/app/javascript/images/info_symbol.png b/app/javascript/images/info_symbol.png
new file mode 100644
index 0000000000000000000000000000000000000000..643937fc18081d0e3ba719f544f9c7e938829707
Binary files /dev/null and b/app/javascript/images/info_symbol.png differ
diff --git a/app/javascript/packs/application.js b/app/javascript/packs/application.js
index b7e5812143d9846dfee89513c62df390be84d690..a1915f29f4babb35d9f4bdbfd6781ae21c087ccc 100644
--- a/app/javascript/packs/application.js
+++ b/app/javascript/packs/application.js
@@ -27,5 +27,15 @@ window.$ = $
window.bootstrap = bootstrap
window.Panzoom = require('@panzoom/panzoom')
import "./application.scss"
+import "./stylesheets/catalog.scss";
const images = require.context('../images', true)
import Chart from 'chart.js/auto'
+
+export function addPrefixURL() {
+ var prefix = document.querySelector('#newspaper-platform');
+ if (prefix != null) {
+ return prefix.dataset.prefix;
+ } else {
+ return "";
+ }
+}
diff --git a/app/javascript/packs/application.scss b/app/javascript/packs/application.scss
index 9a4a0e936edbfceb45e5e432c04a7eb0596f8ce2..d3aad5fd2beb3df7a16e8460a4982e1bd3553374 100644
--- a/app/javascript/packs/application.scss
+++ b/app/javascript/packs/application.scss
@@ -1,3 +1,20 @@
+$labs-green: hsl(180, 15%, 30%);
+$dark-labs-green: hsl(180, 5%, 35%);
+$theme-colors: ( // Overwrite bootstrap colors
+ 'primary': $labs-green,
+ 'secondary': #747474,
+ 'success': $dark-labs-green,
+ 'danger': lighten($labs-green, 20%),
+ 'warning': lighten($labs-green, 25%),
+ 'info': lighten($labs-green, 30%),
+ 'light': white,
+ 'dark': #525e69);
+$link-color: $labs-green;
+$link-hover-color: darken($labs-green, 10%) !default;
+$component-active-bg: $labs-green;
+$card-color: black;
+$border-color: #69737e;
+
$fa-font-path: '../../../node_modules/@fortawesome/fontawesome-free/webfonts';
@import '~@fortawesome/fontawesome-free/scss/fontawesome';
@@ -9,5 +26,3 @@ $fa-font-path: '../../../node_modules/@fortawesome/fontawesome-free/webfonts';
@import '~bootstrap/scss/bootstrap';
@import "~treeflex/dist/css/treeflex";
-
-@import "./stylesheets/catalog.scss";
\ No newline at end of file
diff --git a/app/javascript/packs/controllers/dataset_controller.js b/app/javascript/packs/controllers/dataset_controller.js
index 08ec8bc06671cacd3d090575edefb45a20995376..c183ae62609583b502376757a4ff61a2d624c0d4 100644
--- a/app/javascript/packs/controllers/dataset_controller.js
+++ b/app/javascript/packs/controllers/dataset_controller.js
@@ -3,8 +3,8 @@ import {DatasetAPI} from "../utils/dataset_api"
import {SearchAPI} from "../utils/search_api";
export default class extends Controller {
- static targets = [ ]
- static values = { id: Number, currentPage: Number, perPage: Number, sort: String, sortOrder: String, selectedDocuments: Array }
+ static targets = [ "inputPage" ]
+ static values = { id: Number, currentPage: Number, nbPages: Number, perPage: Number, sort: String, sortOrder: String, selectedDocuments: Array }
connect() {
this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, "article")
@@ -28,9 +28,17 @@ export default class extends Controller {
}
toggleSharingStatus(event) {
- DatasetAPI.toggleSharingStatus(this.idValue, (data) => {
- document.getElementById("dataset-info").outerHTML= data
- })
+ if (document.getElementById("sharing_status").innerText == "Private") {
+ if (confirm("By clicking OK you agree that your username will be publicly displayed next to your dataset")) {
+ DatasetAPI.toggleSharingStatus(this.idValue, (data) => {
+ document.getElementById("dataset-info").outerHTML = data
+ })
+ }
+ } else {
+ DatasetAPI.toggleSharingStatus(this.idValue, (data) => {
+ document.getElementById("dataset-info").outerHTML = data
+ })
+ }
}
export(event) {
@@ -59,6 +67,7 @@ export default class extends Controller {
DatasetAPI.paginateDataset(datasetId, page, per_page, sort, sort_order, type, (data) => {
$("#documents-list").html(data.documents)
$("#results_navigation").html(data.pagination)
+ this.nbPagesValue = data.nb_pages
})
}
@@ -72,7 +81,7 @@ export default class extends Controller {
event.preventDefault()
if (this.currentPageValue > 1) {
this.currentPageValue--
- this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, "all")
+ this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, $("#doctype_selection input:checked").data("doctype"))
}
}
@@ -80,14 +89,22 @@ export default class extends Controller {
event.preventDefault()
if (this.currentPageValue < this.nbPagesValue) {
this.currentPageValue++
- this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, "all")
+ this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, $("#doctype_selection input:checked").data("doctype"))
}
}
page_button(event) {
event.preventDefault()
this.currentPageValue = event.target.textContent
- this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, "all")
+ this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, $("#doctype_selection input:checked").data("doctype"))
}
+ page_select(event) {
+ event.preventDefault()
+ const input_page = parseInt(this.inputPageTarget.value)
+ if (!isNaN(input_page) && 1 <= input_page && input_page <= this.nbPagesValue && !(this.currentPageValue == input_page)) {
+ this.currentPageValue = input_page
+ this.loadDocuments(this.idValue, this.currentPageValue, this.perPageValue, this.sortValue, this.sortOrderValue, $("#doctype_selection input:checked").data("doctype"))
+ }
+ }
}
\ No newline at end of file
diff --git a/app/javascript/packs/controllers/viewer_controller.js b/app/javascript/packs/controllers/viewer_controller.js
index b3d29725c72bbe1f1d88392b33c88ff98eb9dd92..f03cad12a65eeb5c06bf89cb92127a9f0e51d784 100644
--- a/app/javascript/packs/controllers/viewer_controller.js
+++ b/app/javascript/packs/controllers/viewer_controller.js
@@ -5,7 +5,7 @@ import Sortable from 'sortablejs'
export default class extends Controller {
static targets = ['currentPage', 'articleOverlay', 'selectedArticlePanel', 'addArticleButton', 'addCompoundArticleButton', 'compoundArticlePanel']
- static values = {currentPage: Number, nbpages: Number, pages: Array, articles: Array, selectedArticles: Array, issueId: String, compoundMode: Boolean}
+ static values = {currentPage: Number, nbPages: Number, pages: Array, articles: Array, selectedArticles: Array, issueId: String, compoundMode: Boolean}
isDragged = false
viewer = null
@@ -18,15 +18,20 @@ export default class extends Controller {
this.selectedArticlesValue = []
if (selectedCompoundParam != null) {
const compoundParts = $(`#compound-articles-panel li[data-compound-id="${selectedCompoundParam}"]`).data('parts')
- this.selectedCompound = {id: selectedCompoundParam, parts: compoundParts}
+ const compoundTitle = $(`#compound-articles-panel li[data-compound-id="${selectedCompoundParam}"]`).data('title')
+ this.selectedCompound = {id: selectedCompoundParam, parts: compoundParts, title: compoundTitle}
$(`#compound-articles-panel li[data-compound-id="${selectedCompoundParam}"]`).addClass("active")
+ this.load_named_entities(this.selectedCompound.parts)
+ } else {
+ this.load_named_entities([this.issueIdValue])
}
}
else {
this.selectedArticlesValue = [selectedParam]
+ this.load_named_entities([selectedParam])
}
this.setup_viewer()
- this.load_named_entities([this.issueIdValue])
+
this.setup_mention_click()
this.setup_compound()
this.sortable = new Sortable(document.getElementById("compound_list"), {
@@ -110,7 +115,7 @@ export default class extends Controller {
})
// Compound article selection
$("#compound_articles_list").on("click", "li", (event) => {
- const elt = $(event.target)
+ const elt = $(event.currentTarget)
if(elt.hasClass("active"))
this.unselect_compound_article(elt.data('compoundId'))
else
@@ -121,7 +126,8 @@ export default class extends Controller {
select_compound_article(compoundId) {
const compoundParts = $(`#compound-articles-panel li[data-compound-id="${compoundId}"]`).data('parts')
- this.selectedCompound = {id: compoundId, parts: compoundParts}
+ const compoundTitle = $(`#compound-articles-panel li[data-compound-id="${compoundId}"]`).data('title')
+ this.selectedCompound = {id: compoundId, parts: compoundParts, title: compoundTitle}
$("#compound-articles-panel li").removeClass("active")
$(`#compound-articles-panel li[data-compound-id="${compoundId}"]`).addClass("active")
this.unselectArticles()
@@ -163,7 +169,7 @@ export default class extends Controller {
// Go to article page and select it
let article = this.articlesValue.filter((obj) => { return obj["id"] == articleId})[0]
let pagenum = article.canvases_parts[0]
- pagenum = parseInt(pagenum.substring(pagenum.lastIndexOf('_')+1, pagenum.lastIndexOf("#xywh")))
+ pagenum = parseInt(pagenum.substring(pagenum.lastIndexOf('/')+1, pagenum.lastIndexOf("#xywh")))
// this.viewer.goToPage(pagenum)
// this.viewer.viewport.zoomTo(2)
// this.viewer.viewport.panTo(new OpenSeadragon.Point(loc.x+loc.width/2, loc.y+loc.height/2))
@@ -262,7 +268,7 @@ export default class extends Controller {
const art = this.articlesValue.filter(elt => elt.id == article_id)[0]
return art.all_text.replaceAll("\"", "").replaceAll("\\n", " ")
}).join("\n")
- $(this.selectedArticlePanelTarget).find('h5')[0].innerHTML = ""
+ $(this.selectedArticlePanelTarget).find('h5')[0].innerHTML = this.selectedCompound.title
$(this.selectedArticlePanelTarget).find('p')[0].innerHTML = text
}
else {
@@ -359,7 +365,7 @@ export default class extends Controller {
$(this.addArticleButtonTarget).addClass("d-none")
const first_article_part = this.articlesValue.filter((elt)=>{return elt.id == this.selectedCompound.parts[0]})[0]
const pagenum = first_article_part.canvases_parts[0]
- initialPage = parseInt(pagenum.substring(pagenum.lastIndexOf('_')+1, pagenum.lastIndexOf("#xywh")))-1
+ initialPage = parseInt(pagenum.substring(pagenum.lastIndexOf('/')+1, pagenum.lastIndexOf("#xywh")))-1
}
else {
initialPage = 0
@@ -370,11 +376,11 @@ export default class extends Controller {
else {
$(this.addArticleButtonTarget).removeClass("d-none")
const pagenum = selectedArticleObject.canvases_parts[0]
- initialPage = parseInt(pagenum.substring(pagenum.lastIndexOf('_')+1, pagenum.lastIndexOf("#xywh")))-1
+ initialPage = parseInt(pagenum.substring(pagenum.lastIndexOf('/')+1, pagenum.lastIndexOf("#xywh")))-1
}
this.viewer = OpenSeadragon({
id: "openseadragon_view",
- prefixUrl: "/openseadragon/images/",
+ prefixUrl: "/static/js/openseadragon/images/feathericons/",
sequenceMode: true,
initialPage: initialPage,
tileSources: this.pagesValue,
@@ -405,7 +411,7 @@ export default class extends Controller {
this.viewer.addHandler("open", (data) => {
for (let article of this.articlesValue) {
let pagenum = article.canvases_parts[0]
- pagenum = parseInt(pagenum.substring(pagenum.lastIndexOf('_')+1, pagenum.lastIndexOf("#xywh")))
+ pagenum = parseInt(pagenum.substring(pagenum.lastIndexOf('/')+1, pagenum.lastIndexOf("#xywh")))
if (pagenum === this.currentPageValue) {
let bbox = article.bbox
let loc = this.viewer.viewport.imageToViewportRectangle(bbox[0], bbox[1], bbox[2], bbox[3])
diff --git a/app/javascript/packs/stylesheets/catalog.scss b/app/javascript/packs/stylesheets/catalog.scss
index b793509d32367b8de89282cdcb40ea274354473b..1f8e54bf686e9deb81c09ad84845226747bc7493 100644
--- a/app/javascript/packs/stylesheets/catalog.scss
+++ b/app/javascript/packs/stylesheets/catalog.scss
@@ -1,16 +1,16 @@
////////////////// General //////////////////
-html, body {
- height: 100%;
-}
-body .container-fluid {
- height:100%;
-}
-#navigation {
- height: 8%;
-}
-#main-content {
- height: 92%;
-}
+// html, body {
+// height: 100%;
+// }
+// body .container-fluid {
+// height:100%;
+// }
+// #navigation {
+// height: 8%;
+// }
+// #main-content {
+// height: 92%;
+// }
/////////////////////////////////////////////
////////////////// Catalog index //////////////////
@@ -21,7 +21,7 @@ body .container-fluid {
background-color: #EEE;
}
.search_result.selected {
- border: 2px solid #32a1ce;
+ border: 2px solid darken(hsl(180, 15%, 30%), 10%);
padding: calc(0.5em - 2px);
}
#canvas_wide_dates_histogram {
@@ -31,11 +31,19 @@ body .container-fluid {
////////////////// Facets //////////////////
#facets .constrained {
- background: lightgreen;
+ background: lighten(hsl(180, 15%, 30%), 45%);
+}
+
+.accordion-button:not(.collapsed) {
+ color: hsl(180, 15%, 30%);
+}
+
+.accordion-button:not(.collapsed)::after {
+ background-image: url("data:image/svg+xml,%3csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 16 16' fill='%23212529'%3e%3cpath fill-rule='evenodd' d='M1.646 4.646a.5.5 0 0 1 .708 0L8 10.293l5.646-5.647a.5.5 0 0 1 .708.708l-6 6a.5.5 0 0 1-.708 0l-6-6a.5.5 0 0 1 0-.708z'/%3e%3c/svg%3e");
}
li.selected_constraint {
- color: green;
+ color: darken(hsl(180, 15%, 30%), 10%);
}
////////////////////////////////////////////
@@ -43,9 +51,11 @@ li.selected_constraint {
#openseadragon_view {
width: auto;
height: 85vh;
+ border-color: hsl(180, 15%, 30%);
border-style: solid;
- border-width: 2px;
- background-color: #AAAAAA;
+ border-width: 1px;
+ border-radius: 6px;
+ background-color: #b3c3c7;
}
#viewer_container {
position: relative;
@@ -57,11 +67,11 @@ li.selected_constraint {
#page_counter {
position: absolute;
- background-color: #8AF;
+ background-color: #7397a0;
border-color: #000;
border-style: solid;
- font-weight: bold;
- border-width: 2px;
+ border-width: 1px;
+ border-radius: 6px;
top: 1vh;
right: 3.5vh;
padding: 0px 2px;
@@ -109,66 +119,66 @@ li.selected_constraint {
///////////////////////////////////////////////
////////////////// Show Experiment /////////////////////
-.tool:hover {
- cursor: grab;
-}
-.tf-nc.tool-slot {
- border-style: dashed;
- width:8vw;
- height:10vh;
- padding: 0;
-}
-.tf-nc.possible-tool-slot {
- border-color: cornflowerblue;
- border-width: 3px;
-}
-.tf-nc.tool-slot-hover {
- border: 1em solid blue !important;
-}
+// .tool:hover {
+// cursor: grab;
+// }
+// .tf-nc.tool-slot {
+// border-style: dashed;
+// width:8vw;
+// height:10vh;
+// padding: 0;
+// }
+// .tf-nc.possible-tool-slot {
+// border-color: cornflowerblue;
+// border-width: 3px;
+// }
+// .tf-nc.tool-slot-hover {
+// border: 1em solid blue !important;
+// }
-.tool-status {
- height: 1em;
- width: 1em;
- border-radius: 50%;
- border-color: black;
- border-width: 1px;
- border-style: solid;
- display: inline-block;
-}
-.tool-status-created {
- background-color: gray;
-}
-.tool-status-configured {
- background-color: white;
-}
-.tool-status-error {
- background-color: red;
-}
-.tool-status-running {
- background-color: gold;
-}
-.tool-status-finished {
- background-color: green;
-}
+// .tool-status {
+// height: 1em;
+// width: 1em;
+// border-radius: 50%;
+// border-color: black;
+// border-width: 1px;
+// border-style: solid;
+// display: inline-block;
+// }
+// .tool-status-created {
+// background-color: gray;
+// }
+// .tool-status-configured {
+// background-color: white;
+// }
+// .tool-status-error {
+// background-color: red;
+// }
+// .tool-status-running {
+// background-color: gold;
+// }
+// .tool-status-finished {
+// background-color: green;
+// }
-.tool-slot-occupied {
- width:15vw;
- height:15vh;
- padding: 0;
-}
-#experiment_area {
- display: flex;
- align-items: center;
- justify-content: center;
-}
-#experiment_canvas {
- display: flex;
- align-items: center;
- justify-content: center;
- height: 100%;
- width: 100%;
- overflow: visible;
-}
+// .tool-slot-occupied {
+// width:15vw;
+// height:15vh;
+// padding: 0;
+// }
+// #experiment_area {
+// display: flex;
+// align-items: center;
+// justify-content: center;
+// }
+// #experiment_canvas {
+// display: flex;
+// align-items: center;
+// justify-content: center;
+// height: 100%;
+// width: 100%;
+// overflow: visible;
+// }
////////////////////////////////////////////////////////
////////////////////// Index Datasets ////////////////////
@@ -182,10 +192,10 @@ li.selected_constraint {
padding: 0.5em;
}
.dataset_document:hover {
- background-color: #FBFBFB;
+ background-color: #EEE;
}
.dataset_document.selected {
- border: 2px solid #32a1ce;
+ border: 2px solid darken(hsl(180, 15%, 30%), 10%);
padding: calc(0.5em - 2px);
}
////////////////////////////////////////////////////////
\ No newline at end of file
diff --git a/app/javascript/packs/utils/dataset_api.js b/app/javascript/packs/utils/dataset_api.js
index 622c80ea483d2d34cf63aa7d80b6155c82c525f8..49c848a2c439e539ea24664aebd42f18a6ee45d8 100644
--- a/app/javascript/packs/utils/dataset_api.js
+++ b/app/javascript/packs/utils/dataset_api.js
@@ -1,9 +1,11 @@
+import {addPrefixURL} from "../application.js"
+
export class DatasetAPI {
static create_dataset(title, callback) {
$.ajax({
type: "POST",
- url: "/dataset/create",
- data: {title: title},
+ url: addPrefixURL() + "/dataset/create",
+ data: { title: title },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -18,8 +20,8 @@ export class DatasetAPI {
static rename_dataset(id, title, callback) {
$.ajax({
type: "POST",
- url: "/dataset/rename",
- data: {id: id, title: title},
+ url: addPrefixURL() + "/dataset/rename",
+ data: { id: id, title: title },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -34,8 +36,8 @@ export class DatasetAPI {
static import_dataset(id, title, callback) {
$.ajax({
type: "POST",
- url: "/dataset/import",
- data: {original_dataset_id: id, title: title},
+ url: addPrefixURL() + "/dataset/import",
+ data: { original_dataset_id: id, title: title },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -51,8 +53,8 @@ export class DatasetAPI {
static delete_dataset(datasetId, callback) {
$.ajax({
type: "POST",
- url: "/dataset/delete",
- data: {dataset_id: datasetId},
+ url: addPrefixURL() + "/dataset/delete",
+ data: { dataset_id: datasetId },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -68,7 +70,7 @@ export class DatasetAPI {
static update_datasets_list(callback) {
$.ajax({
type: "GET",
- url: "/datasets/update",
+ url: addPrefixURL() + "/datasets/update",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -85,7 +87,7 @@ export class DatasetAPI {
static setCurrentWorkingDataset(datasetId, callback) {
$.ajax({
type: "POST",
- url: "/datasets/working_dataset",
+ url: addPrefixURL() + "/datasets/working_dataset",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -101,7 +103,7 @@ export class DatasetAPI {
static addSelectedDocumentsToWorkingDataset(documentsIds, callback) {
$.ajax({
type: "POST",
- url: "/datasets/add_selected_documents",
+ url: addPrefixURL() + "/datasets/add_selected_documents",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -117,7 +119,7 @@ export class DatasetAPI {
static addSelectedCompoundToWorkingDataset(compoundId, callback) {
$.ajax({
type: "POST",
- url: "/datasets/add_compound",
+ url: addPrefixURL() + "/datasets/add_compound",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -133,7 +135,7 @@ export class DatasetAPI {
static removeSelectedDocumentsToWorkingDataset(documentsIds, callback) {
$.ajax({
type: "POST",
- url: "/datasets/remove_selected_documents",
+ url: addPrefixURL() + "/datasets/remove_selected_documents",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -149,7 +151,7 @@ export class DatasetAPI {
static addAllDocumentsToWorkingDataset(searchParams, callback) {
$.ajax({
type: "POST",
- url: "/datasets/add_all_documents",
+ url: addPrefixURL() + "/datasets/add_all_documents",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -165,7 +167,7 @@ export class DatasetAPI {
static exportDataset(datasetId, exportType, callback) {
$.ajax({
type: "POST",
- url: "/datasets/export_dataset",
+ url: addPrefixURL() + "/datasets/export_dataset",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -182,7 +184,7 @@ export class DatasetAPI {
static paginateDataset(datasetId, page, per_page, sort, sort_order, type, callback) {
$.ajax({
type: "POST",
- url: `/dataset/${datasetId}/paginate`,
+ url: addPrefixURL() + `/dataset/${datasetId}/paginate`,
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -199,7 +201,7 @@ export class DatasetAPI {
static getDatasets(callback) {
$.ajax({
type: "GET",
- url: `/datasets/list`,
+ url: addPrefixURL() + "/datasets/list",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -213,7 +215,7 @@ export class DatasetAPI {
static toggleSharingStatus(dataset_id, callback) {
$.ajax({
type: "POST",
- url: `/dataset/toggle_sharing_status`,
+ url: addPrefixURL() + "/dataset/toggle_sharing_status",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
diff --git a/app/javascript/packs/utils/search_api.js b/app/javascript/packs/utils/search_api.js
index 52408f59d79eaf7d8f7d98764809436fe71581be..7b01d2792d85bf28537bb6cac7e492d6441e0a52 100644
--- a/app/javascript/packs/utils/search_api.js
+++ b/app/javascript/packs/utils/search_api.js
@@ -1,10 +1,12 @@
+import {addPrefixURL} from "../application.js"
+
export class SearchAPI {
static load_dataset_named_entities(dataset_id, callback) {
$.ajax({
type: "POST",
- url: "/dataset_named_entities",
- data: {dataset_id: dataset_id},
+ url: addPrefixURL() + "/dataset_named_entities",
+ data: { dataset_id: dataset_id },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -20,8 +22,8 @@ export class SearchAPI {
static load_named_entities(docs_ids, callback) {
$.ajax({
type: "POST",
- url: "/named_entities",
- data: {docs_ids: docs_ids},
+ url: addPrefixURL() + "/named_entities",
+ data: { docs_ids: docs_ids },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -37,8 +39,8 @@ export class SearchAPI {
static facetPagination(fieldName, nbPages, currentPage, callback) {
$.ajax({
type: "POST",
- url: "/catalog/facet_pagination",
- data: {field_name: fieldName, nb_pages: nbPages, current_page: currentPage},
+ url: addPrefixURL() + "/catalog/facet_pagination",
+ data: { field_name: fieldName, nb_pages: nbPages, current_page: currentPage },
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -54,7 +56,7 @@ export class SearchAPI {
static wideDatesHistogram(callback) {
$.ajax({
type: "POST",
- url: "/catalog/wide_dates_histogram",
+ url: addPrefixURL() + "/catalog/wide_dates_histogram",
data: {},
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
@@ -71,7 +73,7 @@ export class SearchAPI {
static confirm_compond_creation(article_parts, callback) {
$.ajax({
type: "POST",
- url: `/catalog/confirm_compound_creation`,
+ url: addPrefixURL() + "/catalog/confirm_compound_creation",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -87,7 +89,7 @@ export class SearchAPI {
static create_compound(title, all_text, issue_id, article_parts_ids, callback) {
$.ajax({
type: "POST",
- url: `/catalog/create_compound`,
+ url: addPrefixURL() + "/catalog/create_compound",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -106,7 +108,7 @@ export class SearchAPI {
static delete_compound_article(compound_id, callback) {
$.ajax({
type: "POST",
- url: `/catalog/delete_compound`,
+ url: addPrefixURL() + "/catalog/delete_compound",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
@@ -122,7 +124,7 @@ export class SearchAPI {
static random_sample(callback) {
$.ajax({
type: "POST",
- url: `/catalog/random_sample`,
+ url: addPrefixURL() + "/catalog/random_sample",
headers: {
'X-CSRF-Token': $('meta[name="csrf-token"]').attr('content')
},
diff --git a/app/mailers/application_mailer.rb b/app/mailers/application_mailer.rb
index 286b2239d139960190594225e0134fe1a5c05370..3c34c8148f105d699e8ec9b769531192f8637d9a 100644
--- a/app/mailers/application_mailer.rb
+++ b/app/mailers/application_mailer.rb
@@ -1,4 +1,4 @@
class ApplicationMailer < ActionMailer::Base
- default from: 'from@example.com'
- layout 'mailer'
+ default from: "from@example.com"
+ layout "mailer"
end
diff --git a/app/models/article.rb b/app/models/article.rb
index 145c405ad2c50adf6a5cd1884ed038d708c07bb4..27451ce18cc0780e43af3d90f8308c6713ac85f6 100644
--- a/app/models/article.rb
+++ b/app/models/article.rb
@@ -1,100 +1,99 @@
class Article
+ attr_accessor :id, :title, :all_text, :date_created, :language, :canvases_parts, :newspaper, :issue_id, :thumbnail_url, :bbox
- attr_accessor :id, :title, :all_text, :date_created, :language, :canvases_parts, :newspaper, :issue_id, :thumbnail_url, :bbox
+ def self.from_solr(id)
+ solr_doc = SolrSearcher.get_doc_by_id id
+ Article.from_solr_doc solr_doc
+ end
- def self.from_solr id
- solr_doc = SolrSearcher.get_doc_by_id id
- Article.from_solr_doc solr_doc
- end
+ def self.from_solr_doc(solr_doc)
+ a = Article.new
+ a.id = solr_doc["id"]
+ a.title = solr_doc["title_ssi"]
+ a.language = solr_doc["language_ssi"]
+ a.all_text = solr_doc["all_text_t#{a.language}_siv"]
+ a.date_created = solr_doc["date_created_ssi"]
+ a.issue_id = solr_doc["from_issue_ssi"]
+ a.newspaper = solr_doc["member_of_collection_ids_ssim"].first
+ a.thumbnail_url = solr_doc["thumbnail_url_ss"]
+ a.canvases_parts = solr_doc["canvases_parts_ssm"]
+ a.bbox = a.get_location
+ a
+ end
- def self.from_solr_doc solr_doc
- a = Article.new
- a.id = solr_doc['id']
- a.title = solr_doc['title_ssi']
- a.language = solr_doc['language_ssi']
- a.all_text = solr_doc["all_text_t#{a.language}_siv"]
- a.date_created = solr_doc['date_created_ssi']
- a.issue_id = solr_doc['from_issue_ssi']
- a.newspaper = solr_doc['member_of_collection_ids_ssim'].first
- a.thumbnail_url = solr_doc['thumbnail_url_ss']
- a.canvases_parts = solr_doc['canvases_parts_ssm']
- a.bbox = a.get_location
- a
- end
+ def to_solr(page_iiif_url)
+ solr_doc = {}
+ solr_doc["id"] = self.id
+ solr_doc["title_ssi"] = self.title
+ solr_doc["language_ssi"] = self.language
+ solr_doc["all_text_t#{self.language}_siv"] = self.all_text
+ solr_doc["all_text_unstemmed_t#{self.language}_siv"] = self.all_text
+ solr_doc["date_created_ssi"] = self.date_created
+ solr_doc["date_created_dtsi"] = DateTime.parse(self.date_created).strftime("%Y-%m-%dT%H:%M:%SZ")
+ solr_doc["year_isi"] = solr_doc["date_created_ssi"][0..3].to_i
+ d = DateTime.parse solr_doc["date_created_dtsi"]
+ solr_doc["month_isi"] = d.month
+ solr_doc["day_isi"] = d.wday
+ solr_doc["from_issue_ssi"] = self.issue_id
+ solr_doc["member_of_collection_ids_ssim"] = self.newspaper
+ solr_doc["canvases_parts_ssm"] = self.canvases_parts
+ solr_doc["thumbnail_url_ss"] = self.get_iiif_url(page_iiif_url)
+ solr_doc["has_model_ssim"] = "Article"
+ solr_doc
+ end
- def to_solr(page_iiif_url)
- solr_doc = {}
- solr_doc['id'] = self.id
- solr_doc['title_ssi'] = self.title
- solr_doc["language_ssi"] = self.language
- solr_doc["all_text_t#{self.language}_siv"] = self.all_text
- solr_doc["all_text_unstemmed_t#{self.language}_siv"] = self.all_text
- solr_doc['date_created_ssi'] = self.date_created
- solr_doc['date_created_dtsi'] = DateTime.parse(self.date_created).strftime('%Y-%m-%dT%H:%M:%SZ')
- solr_doc['year_isi'] = solr_doc['date_created_ssi'][0..3].to_i
- d = DateTime.parse solr_doc["date_created_dtsi"]
- solr_doc['month_isi'] = d.month
- solr_doc['day_isi'] = d.wday
- solr_doc['from_issue_ssi'] = self.issue_id
- solr_doc['member_of_collection_ids_ssim'] = self.newspaper
- solr_doc['canvases_parts_ssm'] = self.canvases_parts
- solr_doc['thumbnail_url_ss'] = self.get_iiif_url(page_iiif_url)
- solr_doc['has_model_ssim'] = 'Article'
- solr_doc
+ def get_thumbnail
+ if Rails.configuration.iiif_sources[:local].include? self.newspaper
+ pagenum = self.canvases_parts[0][self.canvases_parts[0].rindex("_") + 1...self.canvases_parts[0].rindex("#")].to_i
+ self.get_iiif_url("https://iiif.newseye.eu/iiif/#{self.newspaper}/#{self.issue_id}_page_#{pagenum}.ptif")
+ elsif Rails.configuration.iiif_sources[:external].include? self.newspaper
+ self.thumbnail_url
+ elsif Rails.configuration.iiif_sources[:external_onb].include? self.newspaper
+ self.thumbnail_url
end
+ end
- def get_thumbnail
- if Rails.configuration.iiif_sources[:local].include? self.newspaper
- pagenum = self.canvases_parts[0][self.canvases_parts[0].rindex('_')+1...self.canvases_parts[0].rindex('#')].to_i
- self.get_iiif_url("https://iiif.newseye.eu/iiif/#{self.newspaper}/#{self.issue_id}_page_#{pagenum}.ptif")
- elsif Rails.configuration.iiif_sources[:external].include? self.newspaper
- self.thumbnail_url
- elsif Rails.configuration.iiif_sources[:external_onb].include? self.newspaper
- self.thumbnail_url
- end
- end
+ def get_location
+ coords = self.canvases_parts.map { |c| c[c.rindex("#xywh=") + 6..-1].split(",").map(&:to_i) }
+ min_x = coords.map { |coord| coord[0] }.min
+ max_x = coords.map { |coord| coord[0] + coord[2] }.max
+ min_y = coords.map { |coord| coord[1] }.min
+ max_y = coords.map { |coord| coord[1] + coord[3] }.max
+ canvas_coords = [min_x, max_x, min_y, max_y]
+ canvas_size = [canvas_coords[1] - canvas_coords[0], canvas_coords[3] - canvas_coords[2]]
+ [min_x, min_y, canvas_size[0], canvas_size[1]]
+ end
- def get_location
- coords = self.canvases_parts.map { |c| c[c.rindex('#xywh=')+6..-1].split(',').map(&:to_i) }
- min_x = coords.map{ |coord| coord[0] }.min
- max_x = coords.map{ |coord| coord[0] + coord[2] }.max
- min_y = coords.map{ |coord| coord[1] }.min
- max_y = coords.map{ |coord| coord[1] + coord[3] }.max
- canvas_coords = [min_x, max_x, min_y, max_y]
- canvas_size = [canvas_coords[1]-canvas_coords[0], canvas_coords[3]-canvas_coords[2]]
- [min_x,min_y,canvas_size[0],canvas_size[1]]
- end
+ def get_iiif_url(page_iiif_url)
+ canvas_url = self.canvases_parts[0]
+ coords = self.canvases_parts.map { |c| c[c.rindex("#xywh=") + 6..-1].split(",").map(&:to_i) }
+ min_x = coords.map { |coord| coord[0] }.min
+ max_x = coords.map { |coord| coord[0] + coord[2] }.max
+ min_y = coords.map { |coord| coord[1] }.min
+ max_y = coords.map { |coord| coord[1] + coord[3] }.max
+ pagenum = canvas_url[canvas_url.rindex("_") + 1...canvas_url.rindex("#")].to_i
+ "#{page_iiif_url}/#{min_x},#{min_y},#{max_x - min_x},#{max_y - min_y}/full/0/default.jpg"
+ end
- def get_iiif_url(page_iiif_url)
- canvas_url = self.canvases_parts[0]
- coords = self.canvases_parts.map { |c| c[c.rindex('#xywh=')+6..-1].split(',').map(&:to_i) }
- min_x = coords.map{ |coord| coord[0] }.min
- max_x = coords.map{ |coord| coord[0] + coord[2] }.max
- min_y = coords.map{ |coord| coord[1] }.min
- max_y = coords.map{ |coord| coord[1] + coord[3] }.max
- pagenum = canvas_url[canvas_url.rindex('_')+1...canvas_url.rindex('#')].to_i
- "#{page_iiif_url}/#{min_x},#{min_y},#{max_x-min_x},#{max_y-min_y}/full/0/default.jpg"
+ def self.named_entities(article_id)
+ nems = SolrSearcher.query({ q: "article_id_ssi:#{article_id}", rows: 1000000 })["response"]["docs"]
+ output = { LOC: {}, PER: {}, ORG: {}, HumanProd: {} }
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "LOC" }.each do |ne_solr|
+ output[:LOC][ne_solr["linked_entity_ssi"]] = [] unless output[:LOC].has_key? ne_solr["linked_entity_ssi"]
+ output[:LOC][ne_solr["linked_entity_ssi"]].append(ne_solr)
end
-
- def self.named_entities(article_id)
- nems = SolrSearcher.query({q:"article_id_ssi:#{article_id}", rows: 1000000})['response']['docs']
- output = {LOC: {}, PER: {}, ORG: {}, HumanProd: {}}
- nems.select {|ne_solr| ne_solr['type_ssi'] == "LOC"}.each do |ne_solr|
- output[:LOC][ne_solr['linked_entity_ssi']] = [] unless output[:LOC].has_key? ne_solr['linked_entity_ssi']
- output[:LOC][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "PER"}.each do |ne_solr|
- output[:PER][ne_solr['linked_entity_ssi']] = [] unless output[:PER].has_key? ne_solr['linked_entity_ssi']
- output[:PER][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "ORG"}.each do |ne_solr|
- output[:ORG][ne_solr['linked_entity_ssi']] = [] unless output[:ORG].has_key? ne_solr['linked_entity_ssi']
- output[:ORG][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "HumanProd"}.each do |ne_solr|
- output[:HumanProd][ne_solr['linked_entity_ssi']] = [] unless output[:HumanProd].has_key? ne_solr['linked_entity_ssi']
- output[:HumanProd][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- output
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "PER" }.each do |ne_solr|
+ output[:PER][ne_solr["linked_entity_ssi"]] = [] unless output[:PER].has_key? ne_solr["linked_entity_ssi"]
+ output[:PER][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "ORG" }.each do |ne_solr|
+ output[:ORG][ne_solr["linked_entity_ssi"]] = [] unless output[:ORG].has_key? ne_solr["linked_entity_ssi"]
+ output[:ORG][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "HumanProd" }.each do |ne_solr|
+ output[:HumanProd][ne_solr["linked_entity_ssi"]] = [] unless output[:HumanProd].has_key? ne_solr["linked_entity_ssi"]
+ output[:HumanProd][ne_solr["linked_entity_ssi"]].append(ne_solr)
end
-end
\ No newline at end of file
+ output
+ end
+end
diff --git a/app/models/compound_article.rb b/app/models/compound_article.rb
index 5372b31deaffddfdd73d0f4fa3295f9391103214..dec031193b74e0b13282e1a347a490d0fcbcb0da 100644
--- a/app/models/compound_article.rb
+++ b/app/models/compound_article.rb
@@ -1,6 +1,4 @@
class CompoundArticle < ActiveRecord::Base
-
- belongs_to :user, optional: false
- validates :title, length: { minimum: 1 }
-
-end
\ No newline at end of file
+ belongs_to :user, optional: false
+ validates :title, length: { minimum: 1 }
+end
diff --git a/app/models/concerns/abstract_searcher.rb b/app/models/concerns/abstract_searcher.rb
index 0bced801a3d7e2c1483a9f0165a9a8678d4e10c1..d569b1d1c7890fdd6a71baacfc3be5870a0170bb 100644
--- a/app/models/concerns/abstract_searcher.rb
+++ b/app/models/concerns/abstract_searcher.rb
@@ -1,12 +1,11 @@
module AbstractSearcher
- extend ActiveSupport::Concern
+ extend ActiveSupport::Concern
- def self.query
- raise NotImplementedError, "Subclasses must define `query`."
- end
+ def self.query
+ raise NotImplementedError, "Subclasses must define `query`."
+ end
- def self.get_doc_by_id(id)
- raise NotImplementedError, "Subclasses must define `get_doc_by_id`."
- end
-
-end
\ No newline at end of file
+ def self.get_doc_by_id(id)
+ raise NotImplementedError, "Subclasses must define `get_doc_by_id`."
+ end
+end
diff --git a/app/models/current.rb b/app/models/current.rb
new file mode 100644
index 0000000000000000000000000000000000000000..73a9744b335eab8688cffa0c1207a6248c08036d
--- /dev/null
+++ b/app/models/current.rb
@@ -0,0 +1,3 @@
+class Current < ActiveSupport::CurrentAttributes
+ attribute :user
+end
diff --git a/app/models/dataset.rb b/app/models/dataset.rb
index 59b49ae3ac77cfd28bf941640a14451f3a133a64..be88129d01b61644d3a95b36df75de93e92b505e 100644
--- a/app/models/dataset.rb
+++ b/app/models/dataset.rb
@@ -1,122 +1,121 @@
class Dataset < ActiveRecord::Base
- # after_find :nb_issues, :nb_articles
- belongs_to :user, optional: false
- validates :title, length: { minimum: 1 }
+ # after_find :nb_issues, :nb_articles
+ belongs_to :user, optional: false
+ validates :title, length: { minimum: 1 }
- def add_documents(documents_ids)
- existing = []
- documents_ids.each do |doc_id|
- if self.documents.any?{ |doc| doc['id'] == doc_id }
- existing << doc_id
- else
- doc_type = doc_id.index("_article_").nil? ? "issue" : "article"
- self.documents << {id: doc_id, type: doc_type}
- end
- end
- self.save
- return existing
- end
-
- def add_compound(compound_id)
- existing = []
- if self.documents.any?{ |doc| doc['id'] == compound_id }
- existing << compound_id
- else
- doc_type = "compound"
- self.documents << {id: compound_id, type: doc_type}
- end
- self.save
- return existing
+ def add_documents(documents_ids)
+ existing = []
+ documents_ids.each do |doc_id|
+ if self.documents.any? { |doc| doc["id"] == doc_id }
+ existing << doc_id
+ else
+ doc_type = doc_id.index("_article_").nil? ? "issue" : "article"
+ self.documents << { id: doc_id, type: doc_type }
+ end
end
+ self.save
+ return existing
+ end
- def remove_documents(documents_ids)
- self.documents.delete_if{ |elt| documents_ids.include? elt['id'] }
- self.save
+ def add_compound(compound_id)
+ existing = []
+ if self.documents.any? { |doc| doc["id"] == compound_id }
+ existing << compound_id
+ else
+ doc_type = "compound"
+ self.documents << { id: compound_id, type: doc_type }
end
+ self.save
+ return existing
+ end
- def contains doc_id
- self.documents.index { |doc| doc['id'] == doc_id }.nil? ? false : true
- end
+ def remove_documents(documents_ids)
+ self.documents.delete_if { |elt| documents_ids.include? elt["id"] }
+ self.save
+ end
- def nb_issues
- self.documents.select do |doc|
- doc['type'] == 'issue'
- end.size
- end
+ def contains(doc_id)
+ self.documents.index { |doc| doc["id"] == doc_id }.nil? ? false : true
+ end
- def nb_articles
- self.documents.select do |doc|
- doc['type'] == 'article'
- end.size
- end
+ def nb_issues
+ self.documents.select do |doc|
+ doc["type"] == "issue"
+ end.size
+ end
- def nb_compound_articles
- self.documents.select do |doc|
- doc['type'] == 'compound'
- end.size
- end
+ def nb_articles
+ self.documents.select do |doc|
+ doc["type"] == "article"
+ end.size
+ end
- def fetch_paginated_documents(page, per_page, sort, sort_order, type, recursive=false)
- docs = self.documents.select {|doc| type == "all" || doc['type'] == type }
+ def nb_compound_articles
+ self.documents.select do |doc|
+ doc["type"] == "compound"
+ end.size
+ end
- nb_pages = (docs.size / per_page.to_f).ceil
- nb_pages = 1 if nb_pages == 0
- sort = (sort == "default") ? "score" : sort
- solr_docs = nil
+ def fetch_paginated_documents(page, per_page, sort, sort_order, type, recursive = false)
+ docs = self.documents.select { |doc| type == "all" || doc["type"] == type }
- compounds_ids = docs.select{|d| d['type'] == "compound" }.map{ |d| d['id'] }
- compound_articles = CompoundArticle.find(compounds_ids)
+ nb_pages = (docs.size / per_page.to_f).ceil
+ nb_pages = 1 if nb_pages == 0
+ sort = (sort == "default") ? "score" : sort
+ solr_docs = nil
+ compounds_ids = docs.select { |d| d["type"] == "compound" }.map { |d| d["id"] }
+ compound_articles = CompoundArticle.find(compounds_ids)
- solr_ids = docs.select{|d| d['type'] != "compound" }.map{ |d| d['id'] }
- unless solr_ids.empty?
- solr_docs = SolrSearcher.query({
+ solr_ids = docs.select { |d| d["type"] != "compound" }.map { |d| d["id"] }
+ unless solr_ids.empty?
+ solr_docs = SolrSearcher.query({
q: "*:*",
- fq: "id:(#{solr_ids.join(' ')})",
+ fq: "id:(#{solr_ids.join(" ")})",
rows: per_page,
sort: "#{sort} #{sort_order}",
- start: (page-1)*per_page
- })['response']['docs']
- solr_docs.map! do |solr_doc|
- if solr_doc['id'].index("_article_").nil?
- Issue.from_solr_doc solr_doc
- else
- Article.from_solr_doc solr_doc
- end
- end
- end
- if recursive and page < nb_pages and !solr_docs.nil?
- solr_docs = solr_docs.concat fetch_paginated_documents(page+1, per_page, sort, sort_order, type, true)[:docs]
+ start: (page - 1) * per_page,
+ })["response"]["docs"]
+ solr_docs.map! do |solr_doc|
+ if solr_doc["id"].index("_article_").nil?
+ Issue.from_solr_doc solr_doc
+ else
+ Article.from_solr_doc solr_doc
end
- return {docs: solr_docs.nil? ? compound_articles : solr_docs+compound_articles, nb_pages: nb_pages}
+ end
+ end
+ if recursive and page < nb_pages and !solr_docs.nil?
+ solr_docs = solr_docs.concat fetch_paginated_documents(page + 1, per_page, sort, sort_order, type, true)[:docs]
end
+ return { docs: solr_docs.nil? ? compound_articles : solr_docs + compound_articles, nb_pages: nb_pages }
+ end
- def named_entities
- article_ids = self.documents.select {|d| d['type'] == 'article' }.map{|d| d['id']}
- issue_ids = self.documents.select {|d| d['type'] == 'issue' }.map{|d| d['id']}
- parts_ids = self.documents.select {|d| d['type'] == 'compound' }.map{|d| CompoundArticle.find(d['id']).parts}.flatten.uniq
- nems = []
- nems = SolrSearcher.query({q: "*:*", fq: "article_id_ssi:(#{article_ids.join(' OR ')})", rows: 1000000})['response']['docs'] unless article_ids.empty?
- nems += SolrSearcher.query({q: "*:*", fq: "article_id_ssi:(#{parts_ids.join(' OR ')})", rows: 1000000})['response']['docs'] unless parts_ids.empty?
- nems += SolrSearcher.query({q: "*:*", fq: "issue_id_ssi:(#{issue_ids.join(' OR ')})", rows: 1000000})['response']['docs'] unless issue_ids.empty?
- output = {LOC: {}, PER: {}, ORG: {}, HumanProd: {}}
- nems.select {|ne_solr| ne_solr['type_ssi'] == "LOC"}.each do |ne_solr|
- output[:LOC][ne_solr['linked_entity_ssi']] = [] unless output[:LOC].has_key? ne_solr['linked_entity_ssi']
- output[:LOC][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "PER"}.each do |ne_solr|
- output[:PER][ne_solr['linked_entity_ssi']] = [] unless output[:PER].has_key? ne_solr['linked_entity_ssi']
- output[:PER][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "ORG"}.each do |ne_solr|
- output[:ORG][ne_solr['linked_entity_ssi']] = [] unless output[:ORG].has_key? ne_solr['linked_entity_ssi']
- output[:ORG][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "HumanProd"}.each do |ne_solr|
- output[:HumanProd][ne_solr['linked_entity_ssi']] = [] unless output[:HumanProd].has_key? ne_solr['linked_entity_ssi']
- output[:HumanProd][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- output
+ def named_entities
+ article_ids = self.documents.select { |d| d["type"] == "article" }.map { |d| d["id"] }
+ issue_ids = self.documents.select { |d| d["type"] == "issue" }.map { |d| d["id"] }
+ parts_ids = self.documents.select { |d| d["type"] == "compound" }.map { |d| CompoundArticle.find(d["id"]).parts }.flatten.uniq
+ nems = []
+ nems = SolrSearcher.query({ q: "*:*", fq: "article_id_ssi:(#{article_ids.join(" OR ")})", rows: 1000000 })["response"]["docs"] unless article_ids.empty?
+ nems += SolrSearcher.query({ q: "*:*", fq: "article_id_ssi:(#{parts_ids.join(" OR ")})", rows: 1000000 })["response"]["docs"] unless parts_ids.empty?
+ nems += SolrSearcher.query({ q: "*:*", fq: "issue_id_ssi:(#{issue_ids.join(" OR ")})", rows: 1000000 })["response"]["docs"] unless issue_ids.empty?
+ output = { LOC: {}, PER: {}, ORG: {}, HumanProd: {} }
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "LOC" }.each do |ne_solr|
+ output[:LOC][ne_solr["linked_entity_ssi"]] = [] unless output[:LOC].has_key? ne_solr["linked_entity_ssi"]
+ output[:LOC][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "PER" }.each do |ne_solr|
+ output[:PER][ne_solr["linked_entity_ssi"]] = [] unless output[:PER].has_key? ne_solr["linked_entity_ssi"]
+ output[:PER][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "ORG" }.each do |ne_solr|
+ output[:ORG][ne_solr["linked_entity_ssi"]] = [] unless output[:ORG].has_key? ne_solr["linked_entity_ssi"]
+ output[:ORG][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "HumanProd" }.each do |ne_solr|
+ output[:HumanProd][ne_solr["linked_entity_ssi"]] = [] unless output[:HumanProd].has_key? ne_solr["linked_entity_ssi"]
+ output[:HumanProd][ne_solr["linked_entity_ssi"]].append(ne_solr)
end
+ output
+ end
end
diff --git a/app/models/experiment.rb b/app/models/experiment.rb
index 4d46f6b504bea695e3546397de410da66919dd10..827b2b2458aad84229368d15987fc81364c60ce9 100644
--- a/app/models/experiment.rb
+++ b/app/models/experiment.rb
@@ -1,105 +1,103 @@
class Experiment < ActiveRecord::Base
+ belongs_to :user, optional: false
+ validates :title, length: { minimum: 1 }
- belongs_to :user, optional: false
- validates :title, length: { minimum: 1 }
-
- def add_tool(parent_id, tool)
- if parent_id != 0
- self.locate_tool(self.description, parent_id) do |t|
- t['children'] << tool.to_h
- end
- else
- self.description['children'] << tool.to_h
- end
+ def add_tool(parent_id, tool)
+ if parent_id != 0
+ self.locate_tool(self.description, parent_id) do |t|
+ t["children"] << tool.to_h
+ end
+ else
+ self.description["children"] << tool.to_h
end
+ end
- def delete_tool(tool_id)
- ids = detach_tool(self.description, nil, tool_id)
- end
+ def delete_tool(tool_id)
+ ids = detach_tool(self.description, nil, tool_id)
+ end
- def load_tools
- ids = gather_ids self.description
- Tool.where(id: ids).pluck(:id, :status, :tool_type, :input_type, :output_type, :parent_id).map do |t|
- [t[0], {id: t[0], status: t[1], type: t[2], input_type: t[3], output_type: t[4], parent_id: t[5]}]
- end.to_h
- end
+ def load_tools
+ ids = gather_ids self.description
+ Tool.where(id: ids).pluck(:id, :status, :tool_type, :input_type, :output_type, :parent_id).map do |t|
+ [t[0], { id: t[0], status: t[1], type: t[2], input_type: t[3], output_type: t[4], parent_id: t[5] }]
+ end.to_h
+ end
- def finished?
- tools = self.load_tools
- tools.values.all? { |t| t[:status] == "finished" }
- end
+ def finished?
+ tools = self.load_tools
+ tools.values.all? { |t| t[:status] == "finished" }
+ end
- def running?
- tools = self.load_tools
- tools.values.any? { |t| t[:status] == "running" }
- end
+ def running?
+ tools = self.load_tools
+ tools.values.any? { |t| t[:status] == "running" }
+ end
- def get_tool_ids
- gather_ids self.description
- end
+ def get_tool_ids
+ gather_ids self.description
+ end
- def continue_from(tool_id)
- locate_tool(self.description, tool_id) do |t|
- tools_to_start = t['children'].map { |c| c['tool']['id'] }
- tools_to_start.each do |tool_id|
- tool = Tool.find(tool_id)
- tool.run(true) if tool.runnable?
- end
- end
+ def continue_from(tool_id)
+ locate_tool(self.description, tool_id) do |t|
+ tools_to_start = t["children"].map { |c| c["tool"]["id"] }
+ tools_to_start.each do |tool_id|
+ tool = Tool.find(tool_id)
+ tool.run(true) if tool.runnable?
+ end
end
+ end
- private
+ private
- def locate_tool(tree_part, tool_id, &block)
- if tree_part.has_key?('tool')
- if tree_part['tool']['id'] == tool_id
- yield tree_part
- return true
- else
- tree_part['children'].each do |subtree|
- return true if locate_tool(subtree, tool_id, &block)
- end
- end
- else
- if tree_part['children'].empty?
- yield tree_part
- end
- tree_part['children'].each do |subtree|
- return true if locate_tool(subtree, tool_id, &block)
- end
+ def locate_tool(tree_part, tool_id, &block)
+ if tree_part.has_key?("tool")
+ if tree_part["tool"]["id"] == tool_id
+ yield tree_part
+ return true
+ else
+ tree_part["children"].each do |subtree|
+ return true if locate_tool(subtree, tool_id, &block)
end
- false
+ end
+ else
+ if tree_part["children"].empty?
+ yield tree_part
+ end
+ tree_part["children"].each do |subtree|
+ return true if locate_tool(subtree, tool_id, &block)
+ end
end
+ false
+ end
- def detach_tool(tree, parent_array, tool_id, &block)
- if tree.has_key?('tool')
- if tree['tool']['id'] == tool_id
- ids = gather_ids(tree)
- parent_array.delete(tree) unless parent_array.nil?
- return ids
- else
- tree['children'].each do |subtree|
- res = detach_tool(subtree, tree['children'], tool_id, &block)
- return res unless res.nil?
- end
- end
- else
- tree['children'].each do |subtree|
- res = detach_tool(subtree, tree['children'], tool_id, &block)
- return res unless res.nil?
- end
+ def detach_tool(tree, parent_array, tool_id, &block)
+ if tree.has_key?("tool")
+ if tree["tool"]["id"] == tool_id
+ ids = gather_ids(tree)
+ parent_array.delete(tree) unless parent_array.nil?
+ return ids
+ else
+ tree["children"].each do |subtree|
+ res = detach_tool(subtree, tree["children"], tool_id, &block)
+ return res unless res.nil?
end
- nil
+ end
+ else
+ tree["children"].each do |subtree|
+ res = detach_tool(subtree, tree["children"], tool_id, &block)
+ return res unless res.nil?
+ end
end
+ nil
+ end
- def gather_ids(tree, ids=[])
- tree['children'].each do |subtree|
- ids.concat(gather_ids(subtree))
- end
- if tree.has_key?('tool')
- ids << tree['tool']['id']
- end
- return ids
+ def gather_ids(tree, ids = [])
+ tree["children"].each do |subtree|
+ ids.concat(gather_ids(subtree))
end
-
+ if tree.has_key?("tool")
+ ids << tree["tool"]["id"]
+ end
+ return ids
+ end
end
diff --git a/app/models/issue.rb b/app/models/issue.rb
index e61754174091d46198b9cf3cadc78afeaec24b36..bb289518e3c8facaeb777731955dd92ea959370b 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -1,103 +1,102 @@
class Issue
+ attr_accessor :id, :title, :date_created, :language, :original_uri, :nb_pages, :all_text, :thumbnail_url, :newspaper, :pages, :articles
- attr_accessor :id, :title, :date_created, :language, :original_uri, :nb_pages, :all_text, :thumbnail_url, :newspaper, :pages, :articles
+ def self.from_solr(id, with_pages = false, with_articles = false)
+ solr_doc = SolrSearcher.get_doc_by_id id
+ Issue.from_solr_doc(solr_doc, with_pages, with_articles)
+ end
- def self.from_solr(id, with_pages=false, with_articles=false)
- solr_doc = SolrSearcher.get_doc_by_id id
- Issue.from_solr_doc(solr_doc, with_pages, with_articles)
+ def self.from_solr_doc(solr_doc, with_pages = false, with_articles = false)
+ i = Issue.new
+ i.id = solr_doc["id"]
+ i.language = solr_doc["language_ssi"]
+ i.newspaper = solr_doc["member_of_collection_ids_ssim"][0]
+ i.title = solr_doc["title_ssi"]
+ i.date_created = solr_doc["date_created_ssi"]
+ i.original_uri = solr_doc["original_uri_ss"]
+ i.nb_pages = solr_doc["member_ids_ssim"].size
+ i.thumbnail_url = solr_doc["thumbnail_url_ss"]
+ i.all_text = solr_doc["all_text_t#{i.language}_siv"]
+ if with_pages
+ i.pages = []
+ solr_doc["member_ids_ssim"].each do |pageid|
+ i.pages << Page.from_solr(pageid)
+ end
end
-
- def self.from_solr_doc(solr_doc, with_pages=false, with_articles=false)
- i = Issue.new
- i.id = solr_doc['id']
- i.language = solr_doc['language_ssi']
- i.newspaper = solr_doc['member_of_collection_ids_ssim'][0]
- i.title = solr_doc['title_ssi']
- i.date_created = solr_doc['date_created_ssi']
- i.original_uri = solr_doc['original_uri_ss']
- i.nb_pages = solr_doc['member_ids_ssim'].size
- i.thumbnail_url = solr_doc['thumbnail_url_ss']
- i.all_text = solr_doc["all_text_t#{i.language}_siv"]
- if with_pages
- i.pages = []
- solr_doc['member_ids_ssim'].each do |pageid|
- i.pages << Page.from_solr(pageid)
- end
- end
- if with_articles
- i.articles = []
- articles_docs = SolrSearcher.query({q: "*:*", fq: ["from_issue_ssi:#{i.id}", "has_model_ssim:Article"], fl:"*", rows:10000})['response']['docs']
- articles_docs.each do |articles_doc|
- i.articles << Article.from_solr_doc(articles_doc)
- end
- end
- i
+ if with_articles
+ i.articles = []
+ articles_docs = SolrSearcher.query({ q: "*:*", fq: ["from_issue_ssi:#{i.id}", "has_model_ssim:Article"], fl: "*", rows: 10000 })["response"]["docs"]
+ articles_docs.each do |articles_doc|
+ i.articles << Article.from_solr_doc(articles_doc)
+ end
end
+ i
+ end
- def to_solr
- solr_doc = {}
- solr_doc['id'] = self.id
- solr_doc['has_model_ssim'] = 'Issue'
- solr_doc['title_ssi'] = self.title
- solr_doc['date_created_ssi'] = self.date_created
- solr_doc['date_created_dtsi'] = DateTime.parse(self.date_created).strftime('%Y-%m-%dT%H:%M:%SZ')
- solr_doc['language_ssi'] = self.language
- solr_doc['original_uri_ss'] = self.original_uri
- solr_doc['nb_pages_isi'] = self.nb_pages
- solr_doc['thumbnail_url_ss'] = self.thumbnail_url
- solr_doc['member_ids_ssim'] = self.pages.map(&:id)
- solr_doc['year_isi'] = solr_doc['date_created_ssi'][0..3].to_i
- d = DateTime.parse solr_doc["date_created_dtsi"]
- solr_doc['month_isi'] = d.month
- solr_doc['day_isi'] = d.wday
- solr_doc["member_of_collection_ids_ssim"] = self.newspaper
- solr_doc["all_text_t#{self.language}_siv"] = self.all_text
- solr_doc["all_text_unstemmed_t#{self.language}_siv"] = self.all_text
- solr_doc
- end
+ def to_solr
+ solr_doc = {}
+ solr_doc["id"] = self.id
+ solr_doc["has_model_ssim"] = "Issue"
+ solr_doc["title_ssi"] = self.title
+ solr_doc["date_created_ssi"] = self.date_created
+ solr_doc["date_created_dtsi"] = DateTime.parse(self.date_created).strftime("%Y-%m-%dT%H:%M:%SZ")
+ solr_doc["language_ssi"] = self.language
+ solr_doc["original_uri_ss"] = self.original_uri
+ solr_doc["nb_pages_isi"] = self.nb_pages
+ solr_doc["thumbnail_url_ss"] = self.thumbnail_url
+ solr_doc["member_ids_ssim"] = self.pages.map(&:id)
+ solr_doc["year_isi"] = solr_doc["date_created_ssi"][0..3].to_i
+ d = DateTime.parse solr_doc["date_created_dtsi"]
+ solr_doc["month_isi"] = d.month
+ solr_doc["day_isi"] = d.wday
+ solr_doc["member_of_collection_ids_ssim"] = self.newspaper
+ solr_doc["all_text_t#{self.language}_siv"] = self.all_text
+ solr_doc["all_text_unstemmed_t#{self.language}_siv"] = self.all_text
+ solr_doc
+ end
- def get_thumbnail
- if Rails.configuration.iiif_sources[:local].include? self.newspaper
- "https://iiif.newseye.eu/iiif/#{self.newspaper}/#{self.id}_page_1.ptif/full/200,/0/default.jpg"
- elsif Rails.configuration.iiif_sources[:external].include? self.newspaper
- iiif_pages = self.pages.map{ |p| "#{p.iiif_url}/info.json" } # to change
- elsif Rails.configuration.iiif_sources[:external_onb].include? self.newspaper
- iiif_pages = self.pages.map{ |p| "#{p.iiif_url}/info.json" } # to change
- end
+ def get_thumbnail
+ if Rails.configuration.iiif_sources[:local].include? self.newspaper
+ "https://iiif.newseye.eu/iiif/#{self.newspaper}/#{self.id}_page_1.ptif/full/200,/0/default.jpg"
+ elsif Rails.configuration.iiif_sources[:external].include? self.newspaper
+ iiif_pages = self.pages.map { |p| "#{p.iiif_url}/info.json" } # to change
+ elsif Rails.configuration.iiif_sources[:external_onb].include? self.newspaper
+ iiif_pages = self.pages.map { |p| "#{p.iiif_url}/info.json" } # to change
end
+ end
- def get_iiif_urls
- if Rails.configuration.iiif_sources[:local].include? self.newspaper
- iiif_pages = self.pages.map do |p|
- "https://iiif.newseye.eu/iiif/#{self.newspaper}/#{self.id}_page_#{p.page_number}.ptif/info.json"
- end
- elsif Rails.configuration.iiif_sources[:external].include? self.newspaper
- iiif_pages = self.pages.map{ |p| "#{p.iiif_url}/info.json" }
- elsif Rails.configuration.iiif_sources[:external_onb].include? self.newspaper
- iiif_pages = self.pages.map{ |p| "#{p.iiif_url}/info.json" }
- end
- iiif_pages
+ def get_iiif_urls
+ if Rails.configuration.iiif_sources[:local].include? self.newspaper
+ iiif_pages = self.pages.map do |p|
+ "https://iiif.newseye.eu/iiif/#{self.newspaper}/#{self.id}_page_#{p.page_number}.ptif/info.json"
+ end
+ elsif Rails.configuration.iiif_sources[:external].include? self.newspaper
+ iiif_pages = self.pages.map { |p| "#{p.iiif_url}/info.json" }
+ elsif Rails.configuration.iiif_sources[:external_onb].include? self.newspaper
+ iiif_pages = self.pages.map { |p| "#{p.iiif_url}/info.json" }
end
+ iiif_pages
+ end
- def self.named_entities(issue_id)
- nems = SolrSearcher.query({q:"issue_id_ssi:#{issue_id}", rows: 1000000})['response']['docs']
- output = {LOC: {}, PER: {}, ORG: {}, HumanProd: {}}
- nems.select {|ne_solr| ne_solr['type_ssi'] == "LOC"}.each do |ne_solr|
- output[:LOC][ne_solr['linked_entity_ssi']] = [] unless output[:LOC].has_key? ne_solr['linked_entity_ssi']
- output[:LOC][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "PER"}.each do |ne_solr|
- output[:PER][ne_solr['linked_entity_ssi']] = [] unless output[:PER].has_key? ne_solr['linked_entity_ssi']
- output[:PER][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "ORG"}.each do |ne_solr|
- output[:ORG][ne_solr['linked_entity_ssi']] = [] unless output[:ORG].has_key? ne_solr['linked_entity_ssi']
- output[:ORG][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- nems.select {|ne_solr| ne_solr['type_ssi'] == "HumanProd"}.each do |ne_solr|
- output[:HumanProd][ne_solr['linked_entity_ssi']] = [] unless output[:HumanProd].has_key? ne_solr['linked_entity_ssi']
- output[:HumanProd][ne_solr['linked_entity_ssi']].append(ne_solr)
- end
- output
+ def self.named_entities(issue_id)
+ nems = SolrSearcher.query({ q: "issue_id_ssi:#{issue_id}", rows: 1000000 })["response"]["docs"]
+ output = { LOC: {}, PER: {}, ORG: {}, HumanProd: {} }
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "LOC" }.each do |ne_solr|
+ output[:LOC][ne_solr["linked_entity_ssi"]] = [] unless output[:LOC].has_key? ne_solr["linked_entity_ssi"]
+ output[:LOC][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "PER" }.each do |ne_solr|
+ output[:PER][ne_solr["linked_entity_ssi"]] = [] unless output[:PER].has_key? ne_solr["linked_entity_ssi"]
+ output[:PER][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "ORG" }.each do |ne_solr|
+ output[:ORG][ne_solr["linked_entity_ssi"]] = [] unless output[:ORG].has_key? ne_solr["linked_entity_ssi"]
+ output[:ORG][ne_solr["linked_entity_ssi"]].append(ne_solr)
+ end
+ nems.select { |ne_solr| ne_solr["type_ssi"] == "HumanProd" }.each do |ne_solr|
+ output[:HumanProd][ne_solr["linked_entity_ssi"]] = [] unless output[:HumanProd].has_key? ne_solr["linked_entity_ssi"]
+ output[:HumanProd][ne_solr["linked_entity_ssi"]].append(ne_solr)
end
-end
\ No newline at end of file
+ output
+ end
+end
diff --git a/app/models/notification.rb b/app/models/notification.rb
index d2bb227c3ca5349e3e0481b3b85e5cb34c289e34..be5427653e6b7f8da4a4c42692410e518cf06258 100644
--- a/app/models/notification.rb
+++ b/app/models/notification.rb
@@ -1,6 +1,3 @@
class Notification < ActiveRecord::Base
-
- belongs_to :user, optional: false
-
-
-end
\ No newline at end of file
+ belongs_to :user, optional: false
+end
diff --git a/app/models/page.rb b/app/models/page.rb
index 87f311c37a796a06db41c14857e1344943f0654d..0ba49faa195ed7b4de69a1d68cc5fdcd1d48c236 100644
--- a/app/models/page.rb
+++ b/app/models/page.rb
@@ -1,31 +1,30 @@
class Page
+ attr_accessor :id, :page_number, :width, :height, :mime_type, :iiif_url, :ocr_path, :image_path
- attr_accessor :id, :page_number, :width, :height, :mime_type, :iiif_url, :ocr_path, :image_path
+ def self.from_solr(id)
+ attrs = SolrSearcher.get_doc_by_id id
+ p = Page.new
+ p.id = attrs["id"]
+ p.page_number = attrs["page_number_isi"]
+ p.width = attrs["width_isi"]
+ p.height = attrs["height_isi"]
+ p.mime_type = attrs["mime_type_ssi"]
+ p.iiif_url = attrs["iiif_url_ss"]
+ p.ocr_path = attrs["ocr_path_ss"]
+ p.image_path = attrs["image_path_ss"] if attrs["image_path_ss"]
+ p
+ end
- def self.from_solr id
- attrs = SolrSearcher.get_doc_by_id id
- p = Page.new
- p.id = attrs['id']
- p.page_number = attrs['page_number_isi']
- p.width = attrs['width_isi']
- p.height = attrs['height_isi']
- p.mime_type = attrs['mime_type_ssi']
- p.iiif_url = attrs['iiif_url_ss']
- p.ocr_path = attrs['ocr_path_ss']
- p.image_path = attrs['image_path_ss'] if attrs['image_path_ss']
- p
- end
-
- def to_solr
- solr_doc = {}
- solr_doc['id'] = self.id
- solr_doc['has_model_ssim'] = 'PageFileSet'
- solr_doc['page_number_isi'] = self.page_number
- solr_doc['width_isi'] = self.width
- solr_doc['height_isi'] = self.height
- solr_doc['mime_type_ssi'] = self.mime_type
- solr_doc['iiif_url_ss'] = self.iiif_url
- solr_doc['ocr_path_ss'] = self.ocr_path
- solr_doc
- end
-end
\ No newline at end of file
+ def to_solr
+ solr_doc = {}
+ solr_doc["id"] = self.id
+ solr_doc["has_model_ssim"] = "PageFileSet"
+ solr_doc["page_number_isi"] = self.page_number
+ solr_doc["width_isi"] = self.width
+ solr_doc["height_isi"] = self.height
+ solr_doc["mime_type_ssi"] = self.mime_type
+ solr_doc["iiif_url_ss"] = self.iiif_url
+ solr_doc["ocr_path_ss"] = self.ocr_path
+ solr_doc
+ end
+end
diff --git a/app/models/solr_query.rb b/app/models/solr_query.rb
index 3e5edd7a6427a19154c15c217b6af02285dffaa0..80ae868b4b9799216d6b8d5beec0169c290950a2 100644
--- a/app/models/solr_query.rb
+++ b/app/models/solr_query.rb
@@ -1,54 +1,52 @@
class SolrQuery
+ attr_accessor :defType, :sort, :start, :rows, :fq, :fl, # common parameters
+ :q, :q_dot_alt, :qf, :mm, :pf, :ps, :qs, :tie, :bq, :bf, # Dismax parameters
+ :sow, :mm_dot_autorelax, :boost, :lowercaseOperators, :pf2, :ps2, :pf3, :ps3, :stopwords, :uf, # Edismax parameters
+ :facet, :facet_dot_field, :facet_dot_threads,
+ :hl,
+ :mlt
- attr_accessor :defType, :sort, :start, :rows, :fq, :fl, # common parameters
- :q, :q_dot_alt, :qf, :mm, :pf, :ps, :qs, :tie, :bq, :bf, # Dismax parameters
- :sow, :mm_dot_autorelax, :boost, :lowercaseOperators, :pf2, :ps2, :pf3, :ps3, :stopwords, :uf, # Edismax parameters
- :facet, :facet_dot_field, :facet_dot_threads,
- :hl,
- :mlt
+ def initialize(search_type)
+ @defType = "edismax"
+ @sort = "score desc"
+ @start = 0
+ @rows = 10
+ # @fq = ["has_model_ssim:(Article OR Issue)"]
+ @fq = ["has_model_ssim:(Article)"]
+ @fl = "*,score"
+ @q = "*:*"
+ @q_dot_alt = "*:*"
+ @qf = I18n.t("newspapers.solr_fields").select { |k, v| k.start_with?(search_type == "stemmed" ? "text_stemmed" : "text_exact") }.values # or text_stemmed
+ @mm = 1
+ @pf = ""
+ @ps = ""
+ @qs = ""
+ @tie = 0.1
+ @bq = ""
+ @bf = ""
+ @hl = true
+ @hl_dot_fl = @qf
- def initialize search_type
- @defType = 'edismax'
- @sort = 'score desc'
- @start = 0
- @rows = 10
- # @fq = ["has_model_ssim:(Article OR Issue)"]
- @fq = ["has_model_ssim:(Article)"]
- @fl = '*,score'
- @q = '*:*'
- @q_dot_alt = '*:*'
- @qf = I18n.t("newspapers.solr_fields").select{|k,v| k.start_with?( search_type=="stemmed" ? "text_stemmed" : "text_exact") }.values # or text_stemmed
- @mm = 1
- @pf = ""
- @ps = ""
- @qs = ""
- @tie = 0.1
- @bq = ""
- @bf = ""
- @hl = true
- @hl_dot_fl = @qf
-
- @json_dot_facet = {}
- I18n.t("newspapers.solr_fields").values_at(:language, :newspaper).each do |f|
- @json_dot_facet[f] = { terms: { field: f, limit: 15, numBuckets: true} }
- end
- I18n.t("newspapers.solr_fields").values_at(:date).each do |f|
- @json_dot_facet[f] = { terms: { field: f, limit: -1, numBuckets: true} }
- end
- I18n.t("newspapers.solr_fields").values_at(:month, :day).each do |f|
- @json_dot_facet[f] = { terms: { field: f, limit: 15, numBuckets: true, sort: {index: "asc"}} }
- end
- I18n.t("newspapers.solr_fields").values_at(:persons, :locations, :organisations, :human_productions).each do |f|
- @json_dot_facet[f] = { terms: { field: f, limit: 15, numBuckets: true} }
- end
- @json_dot_facet["min_date"] = "min(date_created_dtsi)"
- @json_dot_facet["max_date"] = "max(date_created_dtsi)"
+ @json_dot_facet = {}
+ I18n.t("newspapers.solr_fields").values_at(:language, :newspaper).each do |f|
+ @json_dot_facet[f] = { terms: { field: f, limit: 15, numBuckets: true } }
end
-
- def to_params
- p = self.instance_values.select {|k,v| v != "" and !v.nil?}.transform_keys{|k| k.gsub('_dot_','.')}.with_indifferent_access
- p["json.facet"] = p["json.facet"].to_json
- p
+ I18n.t("newspapers.solr_fields").values_at(:date).each do |f|
+ @json_dot_facet[f] = { terms: { field: f, limit: -1, numBuckets: true } }
+ end
+ I18n.t("newspapers.solr_fields").values_at(:month, :day).each do |f|
+ @json_dot_facet[f] = { terms: { field: f, limit: 15, numBuckets: true, sort: { index: "asc" } } }
+ end
+ I18n.t("newspapers.solr_fields").values_at(:persons, :locations, :organisations, :human_productions).each do |f|
+ @json_dot_facet[f] = { terms: { field: f, limit: 15, numBuckets: true } }
end
+ @json_dot_facet["min_date"] = "min(date_created_dtsi)"
+ @json_dot_facet["max_date"] = "max(date_created_dtsi)"
+ end
-end
\ No newline at end of file
+ def to_params
+ p = self.instance_values.select { |k, v| v != "" and !v.nil? }.transform_keys { |k| k.gsub("_dot_", ".") }.with_indifferent_access
+ p["json.facet"] = p["json.facet"].to_json
+ p
+ end
+end
diff --git a/app/models/solr_searcher.rb b/app/models/solr_searcher.rb
index e2c4add2ac0b718e727d7406fe09270e098aa5b2..664c1ebb31eaf7ce08ee863f3653a536359dcfa0 100644
--- a/app/models/solr_searcher.rb
+++ b/app/models/solr_searcher.rb
@@ -1,26 +1,25 @@
class SolrSearcher
- include AbstractSearcher
+ include AbstractSearcher
- @@connection = false
+ @@connection = false
- def self.query params
- connect unless @@connection
- puts "[SolrSearcher.Query] #{params.to_json}\n" if Rails.env == "development"
- @@connection.send_and_receive("select", data: params, method: :post)
- end
+ def self.query(params)
+ connect unless @@connection
+ # puts "[SolrSearcher.Query] #{params.to_json}\n" if Rails.env == "development"
+ @@connection.send_and_receive("select", data: params, method: :post)
+ end
- def self.connect
- @@connection = RSolr.connect(url: Rails.configuration.solr['url']) unless @@connection
- end
+ def self.connect
+ @@connection = RSolr.connect(url: Rails.configuration.solr["url"]) unless @@connection
+ end
- def self.get_doc_by_id(id)
- connect unless @@connection
- docs = @@connection.send_and_receive("select", data: {q: "id:#{id}"}, method: :post)['response']['docs']
- if docs.empty?
- nil
- else
- docs[0]
- end
+ def self.get_doc_by_id(id)
+ connect unless @@connection
+ docs = @@connection.send_and_receive("select", data: { q: "id:#{id}" }, method: :post)["response"]["docs"]
+ if docs.empty?
+ nil
+ else
+ docs[0]
end
-
-end
\ No newline at end of file
+ end
+end
diff --git a/app/models/tool.rb b/app/models/tool.rb
index 35b0b6b528fb56e36394d0db1fe797bc5bbbc222..599c79670dfa0c5fba18a0e8e9a5043d1a13b98f 100644
--- a/app/models/tool.rb
+++ b/app/models/tool.rb
@@ -1,22 +1,20 @@
class Tool < ActiveRecord::Base
+ belongs_to :experiment, optional: false
- belongs_to :experiment, optional: false
-
- def to_h
- {
+ def to_h
+ {
"tool": {
- "id": self.id
+ "id": self.id,
},
- "children": []
+ "children": [],
}
- end
-
- def runnable?
- self.status == "configured" && (self.parent_id.nil? || Tool.find(self.parent_id).status == "finished")
- end
+ end
- def run(continue=false)
- "#{self.tool_type}_worker".camelize.constantize.perform_async(self.id, self.experiment.user.id, self.experiment.id, self.tool_type, self.parameters, continue)
- end
+ def runnable?
+ self.status == "configured" && (self.parent_id.nil? || Tool.find(self.parent_id).status == "finished")
+ end
+ def run(continue = false)
+ "#{self.tool_type}_worker".camelize.constantize.perform_async(self.id, self.experiment.user.id, self.experiment.id, self.tool_type, self.parameters, continue)
+ end
end
diff --git a/app/models/user.rb b/app/models/user.rb
index 84c7de53f8ca69d073df21ac717eca743f89f29d..08fcfee751ba9247d43193abb783e414d26acf42 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -1,31 +1,29 @@
class User < ApplicationRecord
- # Include default devise modules. Others available are:
- # :confirmable, :lockable, :timeoutable, :trackable and :omniauthable
- devise :database_authenticatable, :registerable,
- :recoverable, :rememberable, :validatable
+ attribute :labs_user_id, presence: true, unique: true
+ attribute :labs_user_name, presence: true, unique: true
- has_many :experiments
- has_many :datasets
- has_many :notifications
- has_many :compound_articles
+ has_many :experiments
+ has_many :datasets
+ has_many :notifications
+ has_many :compound_articles
+ has_many :active_sessions
- def datasets_with_doc doc_id
- self.datasets.map do |dataset|
- [dataset.id, dataset.title] if dataset.contains doc_id.to_s
- end.delete_if(&:nil?)
- end
-
- def compounds_by_issue
- out = {}
- self.compound_articles.each do |compound_article|
- out[compound_article.issue_id] = [] unless out.has_key? compound_article.issue_id
- out[compound_article.issue_id] << compound_article
- end
- out
- end
+ def datasets_with_doc(doc_id)
+ self.datasets.map do |dataset|
+ [dataset.id, dataset.title] if dataset.contains doc_id.to_s
+ end.delete_if(&:nil?)
+ end
- def researcher?
- Rails.configuration.auths['emails'].include? self.email
+ def compounds_by_issue
+ out = {}
+ self.compound_articles.each do |compound_article|
+ out[compound_article.issue_id] = [] unless out.has_key? compound_article.issue_id
+ out[compound_article.issue_id] << compound_article
end
+ out
+ end
+ def researcher?
+ Rails.configuration.auths["emails"].include? self.email
+ end
end
diff --git a/app/views/catalog/_compound_articles_panel.html.erb b/app/views/catalog/_compound_articles_panel.html.erb
index dcc4393ec1317dc22de8931d71a39de6454a0115..ffbceabc64c50cd2ce2dd8c3457ffded6ee08d64 100644
--- a/app/views/catalog/_compound_articles_panel.html.erb
+++ b/app/views/catalog/_compound_articles_panel.html.erb
@@ -7,9 +7,9 @@
diff --git a/app/views/catalog/_date_facet.html.erb b/app/views/catalog/_date_facet.html.erb
index f3569b9a812aa29c46669ab204954d8e76e236b7..c054514e934d9e042bced1a4142d88cef54c448c 100644
--- a/app/views/catalog/_date_facet.html.erb
+++ b/app/views/catalog/_date_facet.html.erb
@@ -15,7 +15,7 @@
diff --git a/app/views/catalog/_result.html.erb b/app/views/catalog/_result.html.erb
index 1638db848b24ce10c2d711d3baae4f4f931bd81d..07aad3bc375c36e958b55ce7203ba8d8c1ee512c 100644
--- a/app/views/catalog/_result.html.erb
+++ b/app/views/catalog/_result.html.erb
@@ -27,7 +27,7 @@
<% end %>
-
+
diff --git a/app/views/catalog/_result_datasets.html.erb b/app/views/catalog/_result_datasets.html.erb
index c192c0b550496f9d75cefe2fa41deb2b0c9448bf..7520ee404801bbb6f2c169074943a98dd49acd90 100644
--- a/app/views/catalog/_result_datasets.html.erb
+++ b/app/views/catalog/_result_datasets.html.erb
@@ -4,7 +4,7 @@
Currently belongs to:
<% in_datasets.each do |dataset_id, dataset_title| %>
<%= link_to "/dataset/#{dataset_id}" do %>
-
<%= dataset_title %>
+
<%= dataset_title %>
<% end %>
<% end %>
diff --git a/app/views/catalog/_search_form.html.erb b/app/views/catalog/_search_form.html.erb
index 10d476926bbee2d383c18f126089fb2135687f47..c8f0311760a09e5111d4c86e7d6bbbafcca2125e 100644
--- a/app/views/catalog/_search_form.html.erb
+++ b/app/views/catalog/_search_form.html.erb
@@ -1,16 +1,16 @@
\ No newline at end of file
diff --git a/app/views/dataset/_documents.html.erb b/app/views/dataset/_documents.html.erb
index bf97719e7dab93c3cfb9e0712f4de19392813560..5ffc056d90ba9ad8df3ba597050cbdc394c2761e 100644
--- a/app/views/dataset/_documents.html.erb
+++ b/app/views/dataset/_documents.html.erb
@@ -1,7 +1,7 @@
<% docs.each_with_index do |doc, idx| %>
<%# Highlight will be used when searching into a dataset %>
- <%= render partial: "document", locals: {doc: doc, highlight: nil, doc_index: (pagenum-1) * rows + idx + 1} %>
+ <%= render partial: "document", locals: { doc: doc, highlight: doc.all_text, doc_index: (pagenum - 1) * rows + idx + 1 } %>
<% end %>
\ No newline at end of file
diff --git a/app/views/dataset/_pagination.html.erb b/app/views/dataset/_pagination.html.erb
index 766da9c5039ae9baeac136ff89884b7cbfe6a8fc..7e2c650095db8fe689a6702eaeb98ddf7d1dc630 100644
--- a/app/views/dataset/_pagination.html.erb
+++ b/app/views/dataset/_pagination.html.erb
@@ -7,9 +7,18 @@
<% if (nb_pages > 10) %>
<% (1..nb_pages).each do |i| %>
<% if (i >= current_page-2 and i <= current_page+2) or (i <= 1) or (i>= nb_pages) %>
-
" data-action="click->dataset#page_button">
- <%= i %>
-
+ <% if i == current_page %>
+
+
+
+
+
+
+ <% else %>
+
+ <%= i %>
+
+ <% end %>
<% elsif (i == 2 and current_page >= 5) or (i == nb_pages-1 and current_page <= nb_pages-4) %>
...
@@ -18,9 +27,18 @@
<% end %>
<% else %>
<% (1..nb_pages).each do |i| %>
- " data-action="click->dataset#page_button">
- <%= i %>
-
+ <% if i == current_page %>
+
+
+
+
+
+
+ <% else %>
+
+ <%= i %>
+
+ <% end %>
<% end %>
<% end %>
" data-action="click->dataset#next_page">
diff --git a/app/views/dataset/_public_datasets_modal.html.erb b/app/views/dataset/_public_datasets_modal.html.erb
index e2dd8bed90b717bd7ae15c1ac9ab8ccd6ea5bcc5..98614d5c8eccbc836e672ab454477cb40c6783e4 100644
--- a/app/views/dataset/_public_datasets_modal.html.erb
+++ b/app/views/dataset/_public_datasets_modal.html.erb
@@ -28,7 +28,8 @@
<% user = User.find(d.user_id) %>
- <% username = user.email[0...user.email.index('@')] %>
+ <%# <% username = user.email[0...user.email.index('@')] %>
+ <% username = user.labs_user_name %>
<%= username %>
diff --git a/app/views/dataset/add_documents.js.erb b/app/views/dataset/add_documents.js.erb
index de326e7676531a3a24a3f98d9a9db52d5f58cf19..6bfb328805f051b9477899fd1f330188b2122118 100644
--- a/app/views/dataset/add_documents.js.erb
+++ b/app/views/dataset/add_documents.js.erb
@@ -2,7 +2,8 @@
$("#notifications").append("<%= j render(partial: "shared/notification",
locals: {
notif_title: "Dataset modified",
- notif_content: content
+ notif_content: content,
+ notif_autohide: "true"
}) %>")
for(const notif of $('.toast')) {
diff --git a/app/views/dataset/set_working_dataset.js.erb b/app/views/dataset/set_working_dataset.js.erb
index 877d6fbe60edb9be6eae06f95ea486cbf5537d20..b0a2f4ea1454fbc91fbdb8b46f2da2f817da4e01 100644
--- a/app/views/dataset/set_working_dataset.js.erb
+++ b/app/views/dataset/set_working_dataset.js.erb
@@ -2,7 +2,8 @@
$("#notifications").append("<%= j render(partial: "shared/notification",
locals: {
notif_title: "Working dataset",
- notif_content: content
+ notif_content: content,
+ notif_autohide: "true"
}) %>")
for(let notif of $('.toast')) {
diff --git a/app/views/dataset/show.html.erb b/app/views/dataset/show.html.erb
index 442dc6b51658e747cce16031cf30dca5511ebf5f..23bbb1d2a9f28595c261c91bb43720d7c156f082 100644
--- a/app/views/dataset/show.html.erb
+++ b/app/views/dataset/show.html.erb
@@ -3,8 +3,9 @@
diff --git a/app/views/layouts/application.html.erb b/app/views/layouts/application.html.erb
index 5e33ac4cb6faaa264484f7dc3f628ca65dddd51e..a689c50f04fcc1ad57c62f848fab2ff6ef06408a 100644
--- a/app/views/layouts/application.html.erb
+++ b/app/views/layouts/application.html.erb
@@ -32,6 +32,6 @@
<%= yield %>
-
+