Diffstat:
 Gemfile                                              |   1
 Gemfile.lock                                         |   2
 app/controllers/general_controller.rb                |  45
 app/controllers/request_controller.rb                |  16
 app/helpers/application_helper.rb                    |   7
 app/models/info_request.rb                           |  63
 app/models/public_body.rb                            |  24
 app/views/general/_frontpage_bodies_list.html.erb    |   5
 app/views/general/_frontpage_requests_list.html.erb  |   1
 app/views/general/frontpage.html.erb                 |   3
 app/views/request/_sidebar.html.erb                  |  10
 config/application.rb                                |   4
 config/general.yml-example                           |   5
 config/packages                                      |   1
 lib/configuration.rb                                 |   1
 lib/tasks/temp.rake                                  | 316
 spec/controllers/general_controller_spec.rb          |  42
 spec/models/info_request_spec.rb                     |  58
 18 files changed, 215 insertions(+), 389 deletions(-)
diff --git a/Gemfile b/Gemfile
index 186251de4..5cb705a07 100644
--- a/Gemfile
+++ b/Gemfile
@@ -20,6 +20,7 @@ gem 'fastercsv', '>=1.5.5'
gem 'jquery-rails', '~> 2.1'
gem 'json'
gem 'mahoro'
+gem 'memcache-client'
gem 'net-http-local'
gem 'net-purge'
gem 'newrelic_rpm'
diff --git a/Gemfile.lock b/Gemfile.lock
index 989920a72..53f77e1e3 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -125,6 +125,7 @@ GEM
skinny (~> 0.2.3)
sqlite3 (~> 1.3)
thin (~> 1.5.0)
+ memcache-client (1.8.5)
mime-types (1.23)
multi_json (1.7.4)
net-http-local (0.1.2)
@@ -273,6 +274,7 @@ DEPENDENCIES
locale
mahoro
mailcatcher
+ memcache-client
net-http-local
net-purge
newrelic_rpm
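
Note: memcache-client is the Ruby client library that Rails 3's :mem_cache_store cache backend uses to talk to memcached (configured in config/application.rb further down). A minimal sketch of the behaviour the cached fragments rely on; the key name here is hypothetical:

    # Fetch from memcached, computing and storing the value on a miss;
    # later calls inside the expiry window are served from the cache.
    Rails.cache.fetch("frontpage-body-count", :expires_in => 5.minutes) do
      PublicBody.visible.count
    end
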
diff --git a/app/controllers/general_controller.rb b/app/controllers/general_controller.rb
index beefef4e6..aac078829 100644
--- a/app/controllers/general_controller.rb
+++ b/app/controllers/general_controller.rb
@@ -12,52 +12,7 @@ class GeneralController < ApplicationController
# New, improved front page!
def frontpage
medium_cache
- # get some example searches and public bodies to display
- # either from config, or based on a (slow!) query if not set
- body_short_names = AlaveteliConfiguration::frontpage_publicbody_examples.split(/\s*;\s*/).map{|s| "'%s'" % s.gsub(/'/, "''") }.join(", ")
@locale = self.locale_from_params()
- locale_condition = 'public_body_translations.locale = ?'
- conditions = [locale_condition, @locale]
- I18n.with_locale(@locale) do
- if body_short_names.empty?
- # This is too slow
- @popular_bodies = PublicBody.visible.find(:all,
- :order => "info_requests_count desc",
- :limit => 32,
- :conditions => conditions,
- :joins => :translations
- )
- else
- conditions[0] += " and public_bodies.url_name in (" + body_short_names + ")"
- @popular_bodies = PublicBody.find(:all,
- :conditions => conditions,
- :joins => :translations)
- end
- end
- # Get some successful requests
- begin
- query = 'variety:response (status:successful OR status:partially_successful)'
- sortby = "newest"
- max_count = 5
- xapian_object = perform_search([InfoRequestEvent], query, sortby, 'request_title_collapse', max_count)
- @request_events = xapian_object.results.map { |r| r[:model] }
-
- # If there are not yet enough successful requests, fill out the list with
- # other requests
- if @request_events.count < max_count
- @request_events_all_successful = false
- query = 'variety:sent'
- xapian_object = perform_search([InfoRequestEvent], query, sortby, 'request_title_collapse', max_count-@request_events.count)
- more_events = xapian_object.results.map { |r| r[:model] }
- @request_events += more_events
- # Overall we still want the list sorted with the newest first
- @request_events.sort!{|e1,e2| e2.created_at <=> e1.created_at}
- else
- @request_events_all_successful = true
- end
- rescue
- @request_events = []
- end
end
# Display blog entries
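
Note: after this change the frontpage action only sets the locale; the expensive body and request lookups move into PublicBody.popular_bodies and InfoRequest.recent_requests, which the frontpage partials call from inside cached fragments. The action as it stands after this diff:

    def frontpage
      medium_cache
      @locale = self.locale_from_params()
    end
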
diff --git a/app/controllers/request_controller.rb b/app/controllers/request_controller.rb
index 388473b51..841950cd5 100644
--- a/app/controllers/request_controller.rb
+++ b/app/controllers/request_controller.rb
@@ -92,15 +92,8 @@ class RequestController < ApplicationController
# Sidebar stuff
@sidebar = true
- # ... requests that have similar imporant terms
- begin
- limit = 10
- @xapian_similar = ActsAsXapian::Similar.new([InfoRequestEvent], @info_request.info_request_events,
- :limit => limit, :collapse_by_prefix => 'request_collapse')
- @xapian_similar_more = (@xapian_similar.matches_estimated > limit)
- rescue
- @xapian_similar = nil
- end
+ @similar_cache_key = cache_key_for_similar_requests(@info_request, @locale)
+
# Track corresponding to this page
@track_thing = TrackThing.create_track_for_request(@info_request)
@feed_autodetect = [ { :url => do_track_url(@track_thing, 'feed'), :title => @track_thing.params[:title_in_rss], :has_json => true } ]
@@ -971,5 +964,10 @@ class RequestController < ApplicationController
file_info
end
+ def cache_key_for_similar_requests(info_request, locale)
+ "request/similar/#{info_request.id}/#{locale}"
+ end
+
+
end
diff --git a/app/helpers/application_helper.rb b/app/helpers/application_helper.rb
index e3b1e57ac..0c346ab4e 100644
--- a/app/helpers/application_helper.rb
+++ b/app/helpers/application_helper.rb
@@ -116,5 +116,12 @@ module ApplicationHelper
return !session[:using_admin].nil? || (!@user.nil? && @user.super?)
end
+ def cache_if_caching_fragments(*args, &block)
+ if AlaveteliConfiguration::cache_fragments
+ cache(*args) { yield }
+ else
+ yield
+ end
+ end
end
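
Note: cache_if_caching_fragments wraps Rails' cache view helper and falls back to a plain yield when CACHE_FRAGMENTS (default true, see lib/configuration.rb below) is switched off, so templates can use it unconditionally. Usage mirrors the frontpage template later in this diff:

    <% cache_if_caching_fragments("frontpage-#{@locale}", :expires_in => 5.minutes) do %>
      <%# expensive markup is rendered once and stored when fragment caching is on %>
    <% end %>
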
diff --git a/app/models/info_request.rb b/app/models/info_request.rb
index 9463a236e..0a073dc79 100644
--- a/app/models/info_request.rb
+++ b/app/models/info_request.rb
@@ -1212,6 +1212,69 @@ public
end
end
+
+ # Get requests that have similar important terms
+ def similar_requests(limit=10)
+ xapian_similar = nil
+ xapian_similar_more = false
+ begin
+ xapian_similar = ActsAsXapian::Similar.new([InfoRequestEvent],
+ info_request_events,
+ :limit => limit,
+ :collapse_by_prefix => 'request_collapse')
+ xapian_similar_more = (xapian_similar.matches_estimated > limit)
+ rescue
+ end
+ return [xapian_similar, xapian_similar_more]
+ end
+
+ def InfoRequest.recent_requests
+ request_events = []
+ request_events_all_successful = false
+ # Get some successful requests
+ begin
+ query = 'variety:response (status:successful OR status:partially_successful)'
+ sortby = "newest"
+ max_count = 5
+
+ xapian_object = ActsAsXapian::Search.new([InfoRequestEvent],
+ query,
+ :offset => 0,
+ :limit => 5,
+ :sort_by_prefix => 'created_at',
+ :sort_by_ascending => true,
+ :collapse_by_prefix => 'request_title_collapse'
+ )
+ xapian_object.results
+ request_events = xapian_object.results.map { |r| r[:model] }
+
+ # If there are not yet enough successful requests, fill out the list with
+ # other requests
+ if request_events.count < max_count
+ query = 'variety:sent'
+ xapian_object = ActsAsXapian::Search.new([InfoRequestEvent],
+ query,
+ :offset => 0,
+ :limit => max_count-request_events.count,
+ :sort_by_prefix => 'created_at',
+ :sort_by_ascending => true,
+ :collapse_by_prefix => 'request_title_collapse'
+ )
+ xapian_object.results
+ more_events = xapian_object.results.map { |r| r[:model] }
+ request_events += more_events
+ # Overall we still want the list sorted with the newest first
+ request_events.sort!{|e1,e2| e2.created_at <=> e1.created_at}
+ else
+ request_events_all_successful = true
+ end
+ rescue
+ request_events = []
+ end
+
+ return [request_events, request_events_all_successful]
+ end
+
private
def set_defaults
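
Note: both new methods return a pair so callers can show a "more" link or an "all successful" heading without re-running the search. A sketch of the call sites, matching how the sidebar and frontpage partials use them (info_request stands for any InfoRequest instance):

    # Similar requests for the sidebar: Xapian results plus a flag saying
    # whether more matches exist beyond the limit.
    xapian_similar, xapian_similar_more = info_request.similar_requests(10)

    # Recent requests for the frontpage: events plus a flag saying whether
    # every listed request was successful.
    request_events, request_events_all_successful = InfoRequest.recent_requests
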
diff --git a/app/models/public_body.rb b/app/models/public_body.rb
index db6359f6b..d2cfc4b8b 100644
--- a/app/models/public_body.rb
+++ b/app/models/public_body.rb
@@ -723,6 +723,30 @@ class PublicBody < ActiveRecord::Base
'y_max' => 100,
'totals' => original_totals}
end
+ def self.popular_bodies(locale)
+ # get some example searches and public bodies to display
+ # either from config, or based on a (slow!) query if not set
+ body_short_names = AlaveteliConfiguration::frontpage_publicbody_examples.split(/\s*;\s*/)
+ locale_condition = 'public_body_translations.locale = ?'
+ conditions = [locale_condition, locale]
+ bodies = []
+ I18n.with_locale(locale) do
+ if body_short_names.empty?
+ # This is too slow
+ bodies = visible.find(:all,
+ :order => "info_requests_count desc",
+ :limit => 32,
+ :conditions => conditions,
+ :joins => :translations
+ )
+ else
+ conditions[0] += " and public_bodies.url_name in (?)"
+ conditions << body_short_names
+ bodies = find(:all, :conditions => conditions, :joins => :translations)
+ end
+ end
+ return bodies
+ end
private
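
Note: as well as moving the query out of the controller, popular_bodies replaces the hand-quoted "url_name in ('a', 'b')" string with a bound "url_name in (?)" parameter, so ActiveRecord handles quoting of the configured short names. Call site, as used by the bodies-list partial below (assuming @locale is set by the controller):

    popular_bodies = PublicBody.popular_bodies(@locale)
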
diff --git a/app/views/general/_frontpage_bodies_list.html.erb b/app/views/general/_frontpage_bodies_list.html.erb
index 75daea41d..44321f14a 100644
--- a/app/views/general/_frontpage_bodies_list.html.erb
+++ b/app/views/general/_frontpage_bodies_list.html.erb
@@ -1,10 +1,11 @@
-<% if @popular_bodies.size > 0 %>
+<%- popular_bodies = PublicBody.popular_bodies(@locale) %>
+<% if popular_bodies.size > 0 %>
<div id="examples_0">
<h3><%= _("Who can I request information from?") %></h3>
<%= _("{{site_name}} covers requests to {{number_of_authorities}} authorities, including:",
:site_name => site_name, :number_of_authorities => PublicBody.visible.count) %>
<ul>
- <% for popular_body in @popular_bodies %>
+ <% for popular_body in popular_bodies %>
<li><%=public_body_link(popular_body)%>
<%= n_('{{count}} request', '{{count}} requests', popular_body.info_requests_count, :count => popular_body.info_requests_count) %>
</li>
diff --git a/app/views/general/_frontpage_requests_list.html.erb b/app/views/general/_frontpage_requests_list.html.erb
index fa498dfa7..41a875cab 100644
--- a/app/views/general/_frontpage_requests_list.html.erb
+++ b/app/views/general/_frontpage_requests_list.html.erb
@@ -1,3 +1,4 @@
+<%- @request_events, @request_events_all_successful = InfoRequest.recent_requests %>
<div id="examples_1">
<h3>
<% if @request_events_all_successful %>
diff --git a/app/views/general/frontpage.html.erb b/app/views/general/frontpage.html.erb
index bf5261d15..8bb32bdf2 100644
--- a/app/views/general/frontpage.html.erb
+++ b/app/views/general/frontpage.html.erb
@@ -1,4 +1,4 @@
-<% # TODO: Cache for 5 minutes %>
+<% cache_if_caching_fragments("frontpage-#{@locale}", :expires_in => 5.minutes) do %>
<div id="frontpage_splash">
<div id="left_column">
<%= render :partial => "frontpage_new_request" %>
@@ -17,3 +17,4 @@
<%= render :partial => "frontpage_bodies_list" %>
<%= render :partial => "frontpage_requests_list" %>
</div>
+<% end %>
diff --git a/app/views/request/_sidebar.html.erb b/app/views/request/_sidebar.html.erb
index 8d4a4a2d8..8400cd6ac 100644
--- a/app/views/request/_sidebar.html.erb
+++ b/app/views/request/_sidebar.html.erb
@@ -51,16 +51,18 @@
<%= render :partial => 'request/next_actions' %>
- <% # TODO: Cache for 1 day %>
- <% if !@xapian_similar.nil? && @xapian_similar.results.size > 0 %>
+ <% cache_if_caching_fragments(@similar_cache_key, :expires_in => 1.day) do %>
+ <% xapian_similar, xapian_similar_more = @info_request.similar_requests %>
+ <% if !xapian_similar.nil? && xapian_similar.results.size > 0 %>
<h2><%= _('Similar requests')%></h2>
- <% for result in @xapian_similar.results %>
+ <% for result in xapian_similar.results %>
<%= render :partial => 'request/request_listing_short_via_event', :locals => { :event => result[:model], :info_request => result[:model].info_request } %>
<% end %>
- <% if @xapian_similar_more %>
+ <% if xapian_similar_more %>
<p><%= link_to _("More similar requests"), similar_request_path(@info_request.url_title) %></p>
<% end %>
<% end %>
+ <% end %>
<p><%= link_to _('Event history details'), request_details_path(@info_request) %></p>
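
Note: the fragment key built by cache_key_for_similar_requests varies only by request id and locale, so a cached sidebar is refreshed when the one-day :expires_in lapses rather than when the request itself changes. For example (id hypothetical):

    cache_key_for_similar_requests(info_request, 'en')
    # => "request/similar/123/en"
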
diff --git a/config/application.rb b/config/application.rb
index ad5d4b03f..245a60782 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -55,6 +55,10 @@ module Alaveteli
# will be in this time zone
config.time_zone = ::AlaveteliConfiguration::time_zone
+ # Set the cache to use a memcached backend
+ config.cache_store = :mem_cache_store, { :namespace => AlaveteliConfiguration::domain }
+ config.action_dispatch.rack_cache = nil
+
config.after_initialize do |app|
require 'routing_filters.rb'
# Add a catch-all route to force routing errors to be handled by the application,
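
Note: with no server list given, :mem_cache_store talks to memcached on localhost:11211; the :namespace keyed to the site domain keeps several Alaveteli instances sharing one memcached from colliding. A sketch for a deployment that needs an explicit server address instead (the host below is a placeholder):

    config.cache_store = :mem_cache_store, "memcached.internal:11211",
                         { :namespace => AlaveteliConfiguration::domain }
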
diff --git a/config/general.yml-example b/config/general.yml-example
index 60eb5ae1c..b8d9fc854 100644
--- a/config/general.yml-example
+++ b/config/general.yml-example
@@ -209,3 +209,8 @@ PUBLIC_BODY_LIST_FALLBACK_TO_DEFAULT_LOCALE: false
# If true, while in development mode, try to send mail by SMTP to port
# 1025 (the port the mailcatcher listens on by default):
USE_MAILCATCHER_IN_DEVELOPMENT: true
+
+# Use memcached to cache HTML fragments for better performance. Will
+# only have an effect in environments where
+# config.action_controller.perform_caching is set to true
+CACHE_FRAGMENTS: true
diff --git a/config/packages b/config/packages
index 8bb00a849..9a07c5f20 100644
--- a/config/packages
+++ b/config/packages
@@ -38,3 +38,4 @@ bundler
sqlite3
libsqlite3-dev
libicu-dev
+memcached
diff --git a/lib/configuration.rb b/lib/configuration.rb
index fba70f27c..2192433f7 100644
--- a/lib/configuration.rb
+++ b/lib/configuration.rb
@@ -21,6 +21,7 @@ module AlaveteliConfiguration
:AVAILABLE_LOCALES => '',
:BLACKHOLE_PREFIX => 'do-not-reply-to-this-address',
:BLOG_FEED => '',
+ :CACHE_FRAGMENTS => true,
:CONTACT_EMAIL => 'contact@localhost',
:CONTACT_NAME => 'Alaveteli',
:COOKIE_STORE_SESSION_SECRET => 'this default is insecure as code is open source, please override for live sites in config/general; this will do for local development',
diff --git a/lib/tasks/temp.rake b/lib/tasks/temp.rake
index d371ad0dc..bdee3027b 100644
--- a/lib/tasks/temp.rake
+++ b/lib/tasks/temp.rake
@@ -1,292 +1,40 @@
namespace :temp do
- desc "Fix the history of requests where the described state doesn't match the latest status value
- used by search, by adding an edit event that will correct the latest status"
- task :fix_bad_request_states => :environment do
- dryrun = ENV['DRYRUN'] != '0'
- if dryrun
- puts "This is a dryrun"
- end
-
- InfoRequest.find_each() do |info_request|
- next if info_request.url_title == 'holding_pen'
- last_info_request_event = info_request.info_request_events[-1]
- if last_info_request_event.latest_status != info_request.described_state
- puts "#{info_request.id} #{info_request.url_title} #{last_info_request_event.latest_status} #{info_request.described_state}"
- params = { :script => 'rake temp:fix_bad_request_states',
- :user_id => nil,
- :old_described_state => info_request.described_state,
- :described_state => info_request.described_state
- }
- if ! dryrun
- info_request.info_request_events.create!(:last_described_at => last_info_request_event.described_at + 1.second,
- :event_type => 'status_update',
- :described_state => info_request.described_state,
- :calculated_state => info_request.described_state,
- :params => params)
- info_request.info_request_events.each{ |event| event.xapian_mark_needs_index }
- end
- end
-
- end
- end
-
- def disable_duplicate_account(user, count, dryrun)
- dupe_email = "duplicateemail#{count}@example.com"
- puts "Updating #{user.email} to #{dupe_email} for user #{user.id}"
- user.email = dupe_email
- user.save! unless dryrun
- end
-
- desc "Re-extract any missing cached attachments"
- task :reextract_missing_attachments, [:commit] => :environment do |t, args|
- dry_run = args.commit.nil? || args.commit.empty?
- total_messages = 0
- messages_to_reparse = 0
- IncomingMessage.find_each :include => :foi_attachments do |im|
- begin
- reparse = im.foi_attachments.any? { |fa| ! File.exists? fa.filepath }
- total_messages += 1
- messages_to_reparse += 1 if reparse
- if total_messages % 1000 == 0
- puts "Considered #{total_messages} received emails."
- end
- unless dry_run
- im.parse_raw_email! true if reparse
- sleep 2
- end
- rescue StandardError => e
- puts "There was a #{e.class} exception reparsing IncomingMessage with ID #{im.id}"
- puts e.backtrace
- puts e.message
- end
- end
- message = dry_run ? "Would reparse" : "Reparsed"
- message += " #{messages_to_reparse} out of #{total_messages} received emails."
- puts message
- end
-
- desc 'Cleanup accounts with a space in the email address'
- task :clean_up_emails_with_spaces => :environment do
- dryrun = ENV['DRYRUN'] == '0' ? false : true
- if dryrun
- puts "This is a dryrun"
- end
- count = 0
- User.find_each do |user|
- if / /.match(user.email)
-
- email_without_spaces = user.email.gsub(' ', '')
- existing = User.find_user_by_email(email_without_spaces)
- # Another account exists with the canonical address
- if existing
- if user.info_requests.count == 0 and user.comments.count == 0 and user.track_things.count == 0
- count += 1
- disable_duplicate_account(user, count, dryrun)
- elsif existing.info_requests.count == 0 and existing.comments.count == 0 and existing.track_things.count == 0
- count += 1
- disable_duplicate_account(existing, count, dryrun)
- user.email = email_without_spaces
- puts "Updating #{user.email} to #{email_without_spaces} for user #{user.id}"
- user.save! unless dryrun
- else
- user.info_requests.each do |info_request|
- info_request.user = existing
- info_request.save! unless dryrun
- puts "Moved request #{info_request.id} from user #{user.id} to #{existing.id}"
- end
-
- user.comments.each do |comment|
- comment.user = existing
- comment.save! unless dryrun
- puts "Moved comment #{comment.id} from user #{user.id} to #{existing.id}"
- end
-
- user.track_things.each do |track_thing|
- track_thing.tracking_user = existing
- track_thing.save! unless dryrun
- puts "Moved track thing #{track_thing.id} from user #{user.id} to #{existing.id}"
- end
-
- TrackThingsSentEmail.find_each(:conditions => ['user_id = ?', user]) do |sent_email|
- sent_email.user = existing
- sent_email.save! unless dryrun
- puts "Moved track thing sent email #{sent_email.id} from user #{user.id} to #{existing.id}"
-
- end
-
- user.censor_rules.each do |censor_rule|
- censor_rule.user = existing
- censor_rule.save! unless dryrun
- puts "Moved censor rule #{censor_rule.id} from user #{user.id} to #{existing.id}"
- end
-
- user.user_info_request_sent_alerts.each do |sent_alert|
- sent_alert.user = existing
- sent_alert.save! unless dryrun
- puts "Moved sent alert #{sent_alert.id} from user #{user.id} to #{existing.id}"
- end
-
- count += 1
- disable_duplicate_account(user, count, dryrun)
- end
- else
- puts "Updating #{user.email} to #{email_without_spaces} for user #{user.id}"
- user.email = email_without_spaces
- user.save! unless dryrun
- end
- end
- end
- end
-
- desc 'Create a CSV file of a random selection of raw emails, for comparing hexdigests'
- task :random_attachments_hexdigests => :environment do
- # The idea is to run this under the Rail 2 codebase, where
- # Tmail was used to extract the attachements, and the task
- # will output all of those file paths in a CSV file, and a
- # list of the raw email files in another. The latter file is
- # useful so that one can easily tar up the emails with:
- #
- # tar cvz -T raw-email-files -f raw_emails.tar.gz
- #
- # Then you can switch to the Rails 3 codebase, where
- # attachment parsing is done via
- # recompute_attachments_hexdigests
-
- require 'csv'
-
- File.open('raw-email-files', 'w') do |f|
- CSV.open('attachment-hexdigests.csv', 'w') do |csv|
- csv << ['filepath', 'i', 'url_part_number', 'hexdigest']
- IncomingMessage.all(:order => 'RANDOM()', :limit => 1000).each do |incoming_message|
- # raw_email.filepath fails unless the
- # incoming_message has an associated request
- next unless incoming_message.info_request
- raw_email = incoming_message.raw_email
- f.puts raw_email.filepath
- incoming_message.foi_attachments.each_with_index do |attachment, i|
- csv << [raw_email.filepath, i, attachment.url_part_number, attachment.hexdigest]
- end
- end
- end
- end
-
- end
-
-
- desc 'Check the hexdigests of attachments in emails on disk'
- task :recompute_attachments_hexdigests => :environment do
-
- require 'csv'
- require 'digest/md5'
-
- OldAttachment = Struct.new :filename, :attachment_index, :url_part_number, :hexdigest
-
- filename_to_attachments = Hash.new {|h,k| h[k] = []}
-
- header_line = true
- CSV.foreach('attachment-hexdigests.csv') do |filename, attachment_index, url_part_number, hexdigest|
- if header_line
- header_line = false
- else
- filename_to_attachments[filename].push OldAttachment.new filename, attachment_index, url_part_number, hexdigest
- end
+ desc 'Analyse rails log specified by LOG_FILE to produce a list of request volume'
+ task :request_volume => :environment do
+ example = 'rake log_analysis:request_volume LOG_FILE=log/access_log OUTPUT_FILE=/tmp/log_analysis.csv'
+ check_for_env_vars(['LOG_FILE', 'OUTPUT_FILE'],example)
+ log_file_path = ENV['LOG_FILE']
+ output_file_path = ENV['OUTPUT_FILE']
+ is_gz = log_file_path.include?(".gz")
+ urls = Hash.new(0)
+ f = is_gz ? Zlib::GzipReader.open(log_file_path) : File.open(log_file_path, 'r')
+ processed = 0
+ f.each_line do |line|
+ line.force_encoding('ASCII-8BIT')
+ if request_match = line.match(/^Started (?<method>GET|OPTIONS|POST) "(?<path>\/request\/.*?)"/)
+ next if line.match(/request\/\d+\/response/)
+ urls[request_match[:path]] += 1
+ processed += 1
+ end
+ end
+ url_counts = urls.to_a
+ num_requests_visited_n_times = Hash.new(0)
+ CSV.open(output_file_path, "wb") do |csv|
+ csv << ['URL', 'Number of visits']
+ url_counts.sort_by(&:last).each do |url, count|
+ num_requests_visited_n_times[count] +=1
+ csv << [url,"#{count}"]
+ end
+ csv << ['Number of visits', 'Number of URLs']
+ num_requests_visited_n_times.to_a.sort.each do |number_of_times, number_of_requests|
+ csv << [number_of_times, number_of_requests]
+ end
+ csv << ['Total number of visits']
+ csv << [processed]
end
- total_attachments = 0
- attachments_with_different_hexdigest = 0
- files_with_different_numbers_of_attachments = 0
- no_tnef_attachments = 0
- no_parts_in_multipart = 0
-
- multipart_error = "no parts on multipart mail"
- tnef_error = "tnef produced no attachments"
-
- # Now check each file:
- filename_to_attachments.each do |filename, old_attachments|
-
- # Currently it doesn't seem to be possible to reuse the
- # attachment parsing code in Alaveteli without saving
- # objects to the database, so reproduce what it does:
-
- raw_email = nil
- File.open(filename) do |f|
- raw_email = f.read
- end
- mail = MailHandler.mail_from_raw_email(raw_email)
-
- begin
- attachment_attributes = MailHandler.get_attachment_attributes(mail)
- rescue IOError => e
- if e.message == tnef_error
- puts "#{filename} #{tnef_error}"
- no_tnef_attachments += 1
- next
- else
- raise
- end
- rescue Exception => e
- if e.message == multipart_error
- puts "#{filename} #{multipart_error}"
- no_parts_in_multipart += 1
- next
- else
- raise
- end
- end
-
- if attachment_attributes.length != old_attachments.length
- puts "#{filename} the number of old attachments #{old_attachments.length} didn't match the number of new attachments #{attachment_attributes.length}"
- files_with_different_numbers_of_attachments += 1
- else
- old_attachments.each_with_index do |old_attachment, i|
- total_attachments += 1
- attrs = attachment_attributes[i]
- old_hexdigest = old_attachment.hexdigest
- new_hexdigest = attrs[:hexdigest]
- new_content_type = attrs[:content_type]
- old_url_part_number = old_attachment.url_part_number.to_i
- new_url_part_number = attrs[:url_part_number]
- if old_url_part_number != new_url_part_number
- puts "#{i} #{filename} old_url_part_number #{old_url_part_number}, new_url_part_number #{new_url_part_number}"
- end
- if old_hexdigest != new_hexdigest
- body = attrs[:body]
- # First, if the content type is one of
- # text/plain, text/html or application/rtf try
- # changing CRLF to LF and calculating a new
- # digest - we generally don't worry about
- # these changes:
- new_converted_hexdigest = nil
- if ["text/plain", "text/html", "application/rtf"].include? new_content_type
- converted_body = body.gsub /\r\n/, "\n"
- new_converted_hexdigest = Digest::MD5.hexdigest converted_body
- puts "new_converted_hexdigest is #{new_converted_hexdigest}"
- end
- if (! new_converted_hexdigest) || (old_hexdigest != new_converted_hexdigest)
- puts "#{i} #{filename} old_hexdigest #{old_hexdigest} wasn't the same as new_hexdigest #{new_hexdigest}"
- puts " body was of length #{body.length}"
- puts " content type was: #{new_content_type}"
- path = "/tmp/#{new_hexdigest}"
- f = File.new path, "w"
- f.write body
- f.close
- puts " wrote body to #{path}"
- attachments_with_different_hexdigest += 1
- end
- end
- end
- end
-
- end
-
- puts "total_attachments: #{total_attachments}"
- puts "attachments_with_different_hexdigest: #{attachments_with_different_hexdigest}"
- puts "files_with_different_numbers_of_attachments: #{files_with_different_numbers_of_attachments}"
- puts "no_tnef_attachments: #{no_tnef_attachments}"
- puts "no_parts_in_multipart: #{no_parts_in_multipart}"
-
end
end
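
Note: the new task is defined in the temp namespace, while its inline usage string still reads "rake log_analysis:request_volume ...". Assuming the invocation follows the namespace the task is actually defined in, it would be run as:

    bundle exec rake temp:request_volume LOG_FILE=log/access_log OUTPUT_FILE=/tmp/log_analysis.csv
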
diff --git a/spec/controllers/general_controller_spec.rb b/spec/controllers/general_controller_spec.rb
index 116dbe07a..e67cc9492 100644
--- a/spec/controllers/general_controller_spec.rb
+++ b/spec/controllers/general_controller_spec.rb
@@ -116,49 +116,7 @@ describe GeneralController, "when showing the frontpage" do
end
end
-describe GeneralController, "when showing the front page with fixture data" do
- describe 'when constructing the list of recent requests' do
-
- before(:each) do
- get_fixtures_xapian_index
- end
-
- describe 'when there are fewer than five successful requests' do
-
- it 'should list the most recently sent and successful requests by the creation date of the
- request event' do
- # Make sure the newest response is listed first even if a request
- # with an older response has a newer comment or was reclassified more recently:
- # https://github.com/mysociety/alaveteli/issues/370
- #
- # This is a deliberate behaviour change, in that the
- # previous behaviour (showing more-recently-reclassified
- # requests first) was intentional.
- get :frontpage
-
- request_events = assigns[:request_events]
- previous = nil
- request_events.each do |event|
- if previous
- previous.created_at.should be >= event.created_at
- end
- ['sent', 'response'].include?(event.event_type).should be_true
- if event.event_type == 'response'
- ['successful', 'partially_successful'].include?(event.calculated_state).should be_true
- end
- previous = event
- end
- end
- end
-
- it 'should coalesce duplicate requests' do
- get :frontpage
- assigns[:request_events].map(&:info_request).select{|x|x.url_title =~ /^spam/}.length.should == 1
- end
- end
-
-end
describe GeneralController, 'when using xapian search' do
diff --git a/spec/models/info_request_spec.rb b/spec/models/info_request_spec.rb
index 64ad1972e..dcc94e967 100644
--- a/spec/models/info_request_spec.rb
+++ b/spec/models/info_request_spec.rb
@@ -27,7 +27,7 @@ require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
describe InfoRequest do
- describe 'when validating', :focus => true do
+ describe 'when validating' do
it 'should accept a summary with ascii characters' do
info_request = InfoRequest.new(:title => 'abcde')
@@ -1030,7 +1030,7 @@ describe InfoRequest do
end
end
- context "another series of events on a request", :focus => true do
+ context "another series of events on a request" do
it "should have sensible event states" do
# An initial request is sent
request.log_event('sent', {})
@@ -1122,5 +1122,59 @@ describe InfoRequest do
end
+ describe InfoRequest, 'when getting similar requests' do
+
+ before(:each) do
+ get_fixtures_xapian_index
+ end
+
+ it 'should return similar requests' do
+ similar, more = info_requests(:spam_1_request).similar_requests(1)
+ similar.results.first[:model].info_request.should == info_requests(:spam_2_request)
+ end
+
+ it 'should return a flag set to true' do
+ similar, more = info_requests(:spam_1_request).similar_requests(1)
+ more.should be_true
+ end
+
+ end
+
+ describe InfoRequest, 'when constructing the list of recent requests' do
+ before(:each) do
+ get_fixtures_xapian_index
+ end
+
+ describe 'when there are fewer than five successful requests' do
+
+ it 'should list the most recently sent and successful requests by the creation date of the
+ request event' do
+ # Make sure the newest response is listed first even if a request
+ # with an older response has a newer comment or was reclassified more recently:
+ # https://github.com/mysociety/alaveteli/issues/370
+ #
+ # This is a deliberate behaviour change, in that the
+ # previous behaviour (showing more-recently-reclassified
+ # requests first) was intentional.
+ request_events, request_events_all_successful = InfoRequest.recent_requests
+ previous = nil
+ request_events.each do |event|
+ if previous
+ previous.created_at.should be >= event.created_at
+ end
+ ['sent', 'response'].include?(event.event_type).should be_true
+ if event.event_type == 'response'
+ ['successful', 'partially_successful'].include?(event.calculated_state).should be_true
+ end
+ previous = event
+ end
+ end
+ end
+
+ it 'should coalesce duplicate requests' do
+ request_events, request_events_all_successful = InfoRequest.recent_requests
+ request_events.map(&:info_request).select{|x|x.url_title =~ /^spam/}.length.should == 1
+ end
+ end
end