diff --git a/Gemfile b/Gemfile
index 037c606c8..3f07ad408 100644
--- a/Gemfile
+++ b/Gemfile
@@ -21,7 +21,8 @@ gem 'acts-as-taggable-on', '2.4.1'
gem 'spreadsheet'
gem 'ruby-ole'
#gem 'email_verifier', path: 'lib/email_verifier'
-
+gem 'rufus-scheduler'
+gem 'dalli', path: 'lib/dalli-2.7.2'
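+# rufus-scheduler presumably drives the periodic daily/weekly digest mails added in Mailer#send_for_user_activities;
+# dalli (vendored under lib/dalli-2.7.2) is the memcached client used for caching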
group :development do
gem 'grape-swagger'
gem 'grape-swagger-ui', git: 'https://github.com/guange2015/grape-swagger-ui.git'
diff --git a/Gemfile.lock b/Gemfile.lock
deleted file mode 100644
index 355ca422d..000000000
--- a/Gemfile.lock
+++ /dev/null
@@ -1,348 +0,0 @@
-GIT
- remote: https://github.com/guange2015/grape-swagger-ui.git
- revision: 4c33439f236c174ae0e774b3435ef2547995c21d
- specs:
- grape-swagger-ui (0.0.4)
- railties (>= 3.1)
-
-PATH
- remote: lib/better_errors
- specs:
- better_errors (1.1.0)
- coderay (>= 1.0.0)
- erubis (>= 2.6.6)
-
-PATH
- remote: lib/rack-mini-profiler
- specs:
- rack-mini-profiler (0.9.1)
- rack (>= 1.1.3)
-
-PATH
- remote: lib/seems_rateable
- specs:
- seems_rateable (1.0.13)
- jquery-rails
- rails
-
-GEM
- remote: http://rubygems.org/
- remote: https://rubygems.org/
- specs:
- actionmailer (3.2.13)
- actionpack (= 3.2.13)
- mail (~> 2.5.3)
- actionpack (3.2.13)
- activemodel (= 3.2.13)
- activesupport (= 3.2.13)
- builder (~> 3.0.0)
- erubis (~> 2.7.0)
- journey (~> 1.0.4)
- rack (~> 1.4.5)
- rack-cache (~> 1.2)
- rack-test (~> 0.6.1)
- sprockets (~> 2.2.1)
- activemodel (3.2.13)
- activesupport (= 3.2.13)
- builder (~> 3.0.0)
- activerecord (3.2.13)
- activemodel (= 3.2.13)
- activesupport (= 3.2.13)
- arel (~> 3.0.2)
- tzinfo (~> 0.3.29)
- activeresource (3.2.13)
- activemodel (= 3.2.13)
- activesupport (= 3.2.13)
- activesupport (3.2.13)
- i18n (= 0.6.1)
- multi_json (~> 1.0)
- acts-as-taggable-on (2.4.1)
- rails (>= 3, < 5)
- arel (3.0.3)
- axiom-types (0.1.1)
- descendants_tracker (~> 0.0.4)
- ice_nine (~> 0.11.0)
- thread_safe (~> 0.3, >= 0.3.1)
- builder (3.0.0)
- capybara (2.4.1)
- mime-types (>= 1.16)
- nokogiri (>= 1.3.3)
- rack (>= 1.0.0)
- rack-test (>= 0.5.4)
- xpath (~> 2.0)
- celluloid (0.16.0)
- timers (~> 4.0.0)
- childprocess (0.5.3)
- ffi (~> 1.0, >= 1.0.11)
- climate_control (0.0.3)
- activesupport (>= 3.0)
- cocaine (0.5.4)
- climate_control (>= 0.0.3, < 1.0)
- coderay (1.0.9)
- coercible (1.0.0)
- descendants_tracker (~> 0.0.1)
- coffee-rails (3.2.2)
- coffee-script (>= 2.2.0)
- railties (~> 3.2.0)
- coffee-script (2.3.0)
- coffee-script-source
- execjs
- coffee-script-source (1.7.1)
- descendants_tracker (0.0.4)
- thread_safe (~> 0.3, >= 0.3.1)
- diff-lcs (1.2.5)
- equalizer (0.0.9)
- erubis (2.7.0)
- execjs (2.2.1)
- factory_girl (4.4.0)
- activesupport (>= 3.0.0)
- faker (1.4.3)
- i18n (~> 0.5)
- fastercsv (1.5.5)
- ffi (1.9.3)
- ffi (1.9.3-x86-mingw32)
- formatador (0.2.5)
- grape (0.9.0)
- activesupport
- builder
- hashie (>= 2.1.0)
- multi_json (>= 1.3.2)
- multi_xml (>= 0.5.2)
- rack (>= 1.3.0)
- rack-accept
- rack-mount
- virtus (>= 1.0.0)
- grape-entity (0.4.4)
- activesupport
- multi_json (>= 1.3.2)
- grape-swagger (0.8.0)
- grape
- grape-entity
- guard (2.11.1)
- formatador (>= 0.2.4)
- listen (~> 2.7)
- lumberjack (~> 1.0)
- nenv (~> 0.1)
- notiffany (~> 0.0)
- pry (>= 0.9.12)
- shellany (~> 0.0)
- thor (>= 0.18.1)
- guard-rspec (2.5.0)
- guard (>= 1.1)
- rspec (~> 2.11)
- hashie (3.3.1)
- hike (1.2.3)
- hitimes (1.2.2)
- hitimes (1.2.2-x86-mingw32)
- htmlentities (4.3.2)
- i18n (0.6.1)
- ice_nine (0.11.0)
- journey (1.0.4)
- jquery-rails (2.0.3)
- railties (>= 3.1.0, < 5.0)
- thor (~> 0.14)
- json (1.8.1)
- kaminari (0.16.1)
- actionpack (>= 3.0.0)
- activesupport (>= 3.0.0)
- libv8 (3.16.14.3)
- listen (2.8.5)
- celluloid (>= 0.15.2)
- rb-fsevent (>= 0.9.3)
- rb-inotify (>= 0.9)
- lumberjack (1.0.9)
- mail (2.5.4)
- mime-types (~> 1.16)
- treetop (~> 1.4.8)
- metaclass (0.0.4)
- method_source (0.8.2)
- mime-types (1.25.1)
- mini_portile (0.6.0)
- mocha (1.1.0)
- metaclass (~> 0.0.1)
- multi_json (1.10.1)
- multi_xml (0.5.5)
- mysql2 (0.3.11)
- mysql2 (0.3.11-x86-mingw32)
- nenv (0.2.0)
- net-ldap (0.3.1)
- nokogiri (1.6.3)
- mini_portile (= 0.6.0)
- nokogiri (1.6.3-x86-mingw32)
- mini_portile (= 0.6.0)
- notiffany (0.0.3)
- nenv (~> 0.1)
- shellany (~> 0.0)
- paperclip (3.5.4)
- activemodel (>= 3.0.0)
- activesupport (>= 3.0.0)
- cocaine (~> 0.5.3)
- mime-types
- polyglot (0.3.5)
- pry (0.9.12.6)
- coderay (~> 1.0)
- method_source (~> 0.8)
- slop (~> 3.4)
- pry (0.9.12.6-x86-mingw32)
- coderay (~> 1.0)
- method_source (~> 0.8)
- slop (~> 3.4)
- win32console (~> 1.3)
- rack (1.4.5)
- rack-accept (0.4.5)
- rack (>= 0.4)
- rack-cache (1.2)
- rack (>= 0.4)
- rack-mount (0.8.3)
- rack (>= 1.0.0)
- rack-openid (1.4.2)
- rack (>= 1.1.0)
- ruby-openid (>= 2.1.8)
- rack-raw-upload (1.1.1)
- multi_json
- rack-ssl (1.3.4)
- rack
- rack-test (0.6.2)
- rack (>= 1.0)
- rails (3.2.13)
- actionmailer (= 3.2.13)
- actionpack (= 3.2.13)
- activerecord (= 3.2.13)
- activeresource (= 3.2.13)
- activesupport (= 3.2.13)
- bundler (~> 1.0)
- railties (= 3.2.13)
- railties (3.2.13)
- actionpack (= 3.2.13)
- activesupport (= 3.2.13)
- rack-ssl (~> 1.3.2)
- rake (>= 0.8.7)
- rdoc (~> 3.4)
- thor (>= 0.14.6, < 2.0)
- rake (10.3.2)
- rb-fsevent (0.9.4)
- rb-inotify (0.9.5)
- ffi (>= 0.5.0)
- rdoc (3.12.2)
- json (~> 1.4)
- ref (1.0.5)
- rich (1.4.6)
- jquery-rails
- kaminari
- mime-types
- paperclip
- rack-raw-upload
- rails (>= 3.2.0)
- sass-rails
- rspec (2.13.0)
- rspec-core (~> 2.13.0)
- rspec-expectations (~> 2.13.0)
- rspec-mocks (~> 2.13.0)
- rspec-core (2.13.1)
- rspec-expectations (2.13.0)
- diff-lcs (>= 1.1.3, < 2.0)
- rspec-mocks (2.13.1)
- rspec-rails (2.13.1)
- actionpack (>= 3.0)
- activesupport (>= 3.0)
- railties (>= 3.0)
- rspec-core (~> 2.13.0)
- rspec-expectations (~> 2.13.0)
- rspec-mocks (~> 2.13.0)
- ruby-ole (1.2.11.7)
- ruby-openid (2.1.8)
- rubyzip (1.1.6)
- sass (3.3.10)
- sass-rails (3.2.6)
- railties (~> 3.2.0)
- sass (>= 3.1.10)
- tilt (~> 1.3)
- selenium-webdriver (2.42.0)
- childprocess (>= 0.5.0)
- multi_json (~> 1.0)
- rubyzip (~> 1.0)
- websocket (~> 1.0.4)
- shellany (0.0.1)
- shoulda (3.5.0)
- shoulda-context (~> 1.0, >= 1.0.1)
- shoulda-matchers (>= 1.4.1, < 3.0)
- shoulda-context (1.2.1)
- shoulda-matchers (2.6.1)
- activesupport (>= 3.0.0)
- slop (3.6.0)
- spreadsheet (1.0.0)
- ruby-ole (>= 1.0)
- sprockets (2.2.2)
- hike (~> 1.2)
- multi_json (~> 1.0)
- rack (~> 1.0)
- tilt (~> 1.1, != 1.3.0)
- therubyracer (0.12.1)
- libv8 (~> 3.16.14.0)
- ref
- thor (0.19.1)
- thread_safe (0.3.4)
- tilt (1.4.1)
- timers (4.0.1)
- hitimes
- treetop (1.4.15)
- polyglot
- polyglot (>= 0.3.1)
- tzinfo (0.3.40)
- uglifier (2.5.1)
- execjs (>= 0.3.0)
- json (>= 1.8.0)
- virtus (1.0.3)
- axiom-types (~> 0.1)
- coercible (~> 1.0)
- descendants_tracker (~> 0.0, >= 0.0.3)
- equalizer (~> 0.0, >= 0.0.9)
- websocket (1.0.7)
- win32console (1.3.2-x86-mingw32)
- xpath (2.0.0)
- nokogiri (~> 1.3)
-
-PLATFORMS
- ruby
- x86-mingw32
-
-DEPENDENCIES
- activerecord-jdbc-adapter (= 1.2.5)
- activerecord-jdbcmysql-adapter
- acts-as-taggable-on (= 2.4.1)
- better_errors!
- builder (= 3.0.0)
- capybara (~> 2.4.1)
- coderay (~> 1.0.6)
- coffee-rails (~> 3.2.1)
- factory_girl (~> 4.4.0)
- faker
- fastercsv (~> 1.5.0)
- grape (~> 0.9.0)
- grape-entity
- grape-swagger
- grape-swagger-ui!
- guard-rspec (= 2.5.0)
- htmlentities
- i18n (~> 0.6.0)
- jquery-rails (~> 2.0.2)
- kaminari
- mocha (~> 1.1.0)
- mysql2 (= 0.3.11)
- net-ldap (~> 0.3.1)
- nokogiri (~> 1.6.3)
- paperclip (~> 3.5.4)
- rack-mini-profiler!
- rack-openid
- rails (= 3.2.13)
- rich (= 1.4.6)
- rspec-rails (= 2.13.1)
- ruby-ole
- ruby-openid (~> 2.1.4)
- sass-rails (~> 3.2.3)
- seems_rateable!
- selenium-webdriver (~> 2.42.0)
- shoulda (~> 3.5.0)
- spreadsheet
- therubyracer
- uglifier (>= 1.0.3)
diff --git a/app/api/mobile/apis/courses.rb b/app/api/mobile/apis/courses.rb
index 8b167980d..6d68d549a 100644
--- a/app/api/mobile/apis/courses.rb
+++ b/app/api/mobile/apis/courses.rb
@@ -213,7 +213,7 @@ module Mobile
present :status, 0
end
- desc '课程动态'
+ desc '用户课程动态'
params do
requires :token, type: String
end
diff --git a/app/api/mobile/entities/course.rb b/app/api/mobile/entities/course.rb
index 512aab69a..4c91738c1 100644
--- a/app/api/mobile/entities/course.rb
+++ b/app/api/mobile/entities/course.rb
@@ -53,7 +53,7 @@ module Mobile
c[:course].teacher
end
end
- expose :my_homework,using: Mobile::Entities::HomeworkAttach do |f, opt|
+ expose :my_homework,using: Mobile::Entities::Homework do |f, opt|
f[:my_homework] if f.is_a?(Hash) && f.key?(:my_homework)
end
course_expose :current_user_is_member
diff --git a/app/api/mobile/entities/course_dynamic.rb b/app/api/mobile/entities/course_dynamic.rb
index 11f8c2682..5e22e3df7 100644
--- a/app/api/mobile/entities/course_dynamic.rb
+++ b/app/api/mobile/entities/course_dynamic.rb
@@ -6,12 +6,11 @@ module Mobile
c[field] if (c.is_a?(Hash) && c.key?(field))
end
end
-
+ course_dynamic_expose :type
+ course_dynamic_expose :count
course_dynamic_expose :course_name
- course_dynamic_expose :need_anonymous_comments_count
- course_dynamic_expose :student_commit_number
- course_dynamic_expose :news_count
- course_dynamic_expose :message_count
+ course_dynamic_expose :course_id
+ course_dynamic_expose :course_img_url
end
end
end
\ No newline at end of file
diff --git a/app/api/mobile/entities/homework.rb b/app/api/mobile/entities/homework.rb
index 9be9bc53e..55883e14e 100644
--- a/app/api/mobile/entities/homework.rb
+++ b/app/api/mobile/entities/homework.rb
@@ -1,10 +1,15 @@
module Mobile
module Entities
class Homework < Grape::Entity
+ include Redmine::I18n
def self.homework_expose(field)
expose field do |f,opt|
if f.is_a?(Hash) && f.key?(field)
- f[field]
+ if field == :created_on
+ format_time(f[field])
+ else
+ f[field]
+ end
elsif f.is_a?(::Bid)
if f.respond_to?(field)
f.send(field)
@@ -18,8 +23,16 @@ module Mobile
homework_expose :id
#Course name
homework_expose :course_name
- #Course teacher
- homework_expose :course_teacher
+
+ homework_expose :course_id
+ #Homework author (publisher)
+ expose :author,using: Mobile::Entities::User do |f, opt|
+ f[:author]
+ end
+
+ #Real name of the homework author
+ homework_expose :author_real_name
+
#Assignment number (which homework this is)
homework_expose :homework_times
#Homework name
@@ -36,6 +49,9 @@ module Mobile
#Anonymous evaluation can only be started when it is enabled for the homework, the current user is the course teacher, and at least 2 works have been submitted
homework_expose :homework_state
+ homework_expose :created_on
+ homework_expose :deadline
+
expose :homework_for_anonymous_comments,using: Mobile::Entities::HomeworkAttach do |f, opt|
f[:homework_for_anonymous_comments] if f.is_a?(Hash) && f.key?(:homework_for_anonymous_comments)
end
diff --git a/app/controllers/account_controller.rb b/app/controllers/account_controller.rb
index ff66b8d46..87c028354 100644
--- a/app/controllers/account_controller.rb
+++ b/app/controllers/account_controller.rb
@@ -315,7 +315,7 @@ class AccountController < ApplicationController
code = /\d*/
#Build the regular expression from home_url
eval("code = " + "/^" + home_url.gsub(/\//,"\\\/") + "\\\/*(welcome)?\\\/*(\\\/index\\\/*.*)?\$/")
- if code=~params[:back_url] && last_login_on != ''
+ if (code=~params[:back_url] || params[:back_url].to_s.include?('lost_password')) && last_login_on != ''
redirect_to user_activities_path(user)
else
if last_login_on == ''
diff --git a/app/controllers/bids_controller.rb b/app/controllers/bids_controller.rb
index 7e5894b83..a22012390 100644
--- a/app/controllers/bids_controller.rb
+++ b/app/controllers/bids_controller.rb
@@ -9,7 +9,7 @@ class BidsController < ApplicationController
menu_item :homework_statistics, :only => :homework_statistics
menu_item :edit, :only => :edit
-
+ before_filter :auth_login1, :only => [:show_courseEx]
before_filter :can_show_course,only: []
before_filter :can_show_contest,only: []
diff --git a/app/controllers/courses_controller.rb b/app/controllers/courses_controller.rb
index 19f9c0fa2..8fae547c4 100644
--- a/app/controllers/courses_controller.rb
+++ b/app/controllers/courses_controller.rb
@@ -5,6 +5,7 @@ class CoursesController < ApplicationController
helper :members
helper :words
+ before_filter :auth_login1, :only => [:show, :feedback]
menu_item :overview
menu_item :feedback, :only => :feedback
menu_item :homework, :only => :homework
@@ -98,7 +99,8 @@ class CoursesController < ApplicationController
#Update course information
def update
cs = CoursesService.new
- @course = cs.edit_course params,@course,User.current
+ c = cs.edit_course params,@course,User.current
+ @course = c[:course]
if @course.errors.full_messages.count <= 0
respond_to do |format|
format.html {
@@ -498,7 +500,7 @@ class CoursesController < ApplicationController
def create
cs = CoursesService.new
- @course = cs.create_course params,User.current
+ @course = cs.create_course(params,User.current)[:course]
if @course.new_record?
respond_to do |format|
format.html { render :action => 'new', :layout => 'base' } #Added by young
diff --git a/app/controllers/files_controller.rb b/app/controllers/files_controller.rb
index 56f760e13..5336ab54d 100644
--- a/app/controllers/files_controller.rb
+++ b/app/controllers/files_controller.rb
@@ -20,7 +20,7 @@ class FilesController < ApplicationController
layout 'base_projects' #by young
menu_item :files
-
+ before_filter :auth_login1, :only => [:index]
before_filter :find_project_by_project_id#, :except => [:getattachtype]
before_filter :authorize, :except => [:getattachtype,:quote_resource_show,:search]
diff --git a/app/controllers/messages_controller.rb b/app/controllers/messages_controller.rb
index 8d6943f02..d2a253c2a 100644
--- a/app/controllers/messages_controller.rb
+++ b/app/controllers/messages_controller.rb
@@ -17,6 +17,7 @@
class MessagesController < ApplicationController
include ApplicationHelper
+ before_filter :authorize1, :only => [:show]
menu_item :boards
default_search_scope :messages
before_filter :find_board, :only => [:new, :preview,:edit]
diff --git a/app/controllers/news_controller.rb b/app/controllers/news_controller.rb
index 2df17d73f..b44e8a348 100644
--- a/app/controllers/news_controller.rb
+++ b/app/controllers/news_controller.rb
@@ -17,6 +17,7 @@
class NewsController < ApplicationController
layout 'base_projects'# by young
+ before_filter :authorize1, :only => [:show]
default_search_scope :news
model_object News
before_filter :find_model_object, :except => [:new, :create, :index]
diff --git a/app/controllers/poll_controller.rb b/app/controllers/poll_controller.rb
index 1b2794034..b897d039a 100644
--- a/app/controllers/poll_controller.rb
+++ b/app/controllers/poll_controller.rb
@@ -23,6 +23,10 @@ class PollController < ApplicationController
def show
@poll = Poll.find params[:id]
+ if @poll.polls_status != 2 && !User.current.allowed_to?(:as_teacher,@course)
+ render_403
+ return
+ end
#Users who have already submitted the poll can no longer access this page
if has_commit_poll?(@poll.id,User.current.id) && (!User.current.admin?)
render_403
diff --git a/app/controllers/projects_controller.rb b/app/controllers/projects_controller.rb
index 02bfc9e19..41ec5fbf4 100644
--- a/app/controllers/projects_controller.rb
+++ b/app/controllers/projects_controller.rb
@@ -19,7 +19,7 @@
# Description: encapsulate, simplify and format the code
class ProjectsController < ApplicationController
layout :select_project_layout
-
+ before_filter :authorize1, :only => [:show]
menu_item :overview, :only => :show
menu_item :roadmap, :only => :roadmap
menu_item :settings, :only => :settings
@@ -249,6 +249,17 @@ class ProjectsController < ApplicationController
# Description: project activity display action, unnecessary code removed
def show
# Try to jump to the requested menu item
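+ # Invite-mail entry point (see Mailer#send_invite_in_project in this patch): when login/password
+ # params are present, auto-register that user, add them to this project
+ # (role id 4 is assumed to be the default member role) and treat them as signed in.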
+ if params[:login]
+ login = params[:login]
+ login = login.sub(/%40/,'@')
+ mail = params[:login]
+ password = params[:password]
+ us = UsersService.new
+ user = us.register_auto(login,mail, password)
+ Member.create(:role_ids => [4], :user_id => user.id,:project_id => @project.id)
+ UserGrade.create(:user_id => user.id, :project_id => @project.id)
+ User.current = user unless User.current.nil?
+ end
if params[:jump] && redirect_to_project_menu_item(@project, params[:jump])
return
end
diff --git a/app/controllers/users_controller.rb b/app/controllers/users_controller.rb
index a2933c577..8ef6f6b53 100644
--- a/app/controllers/users_controller.rb
+++ b/app/controllers/users_controller.rb
@@ -18,7 +18,7 @@ class UsersController < ApplicationController
layout :setting_layout
#Added by young
- before_filter :auth_login1, :only => [:show, :user_activities]
+ before_filter :auth_login1, :only => [:show, :user_activities, :user_newfeedback]
menu_item :activity
menu_item :user_information, :only => :info
menu_item :user_course, :only => :user_courses
diff --git a/app/controllers/welcome_controller.rb b/app/controllers/welcome_controller.rb
index 3ae5538e9..db2f287eb 100644
--- a/app/controllers/welcome_controller.rb
+++ b/app/controllers/welcome_controller.rb
@@ -84,7 +84,7 @@ class WelcomeController < ApplicationController
def course
@course_page = FirstPage.find_by_page_type('course')
- @school_id = params[:school_id] || User.current.user_extensions.school.try(:id)
+ @school_id = params[:school_id] || User.current.user_extensions.school.try(:id) || 117
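+ # 117 is the fallback school id used when neither the request nor the user's profile provides one (assumed site default)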
@logoLink ||= logolink()
##March-August queries spring-term courses; September-February queries autumn-term courses
diff --git a/app/models/forum_observer.rb b/app/models/forum_observer.rb
index 1c514d6ab..6afcac824 100644
--- a/app/models/forum_observer.rb
+++ b/app/models/forum_observer.rb
@@ -1,8 +1,8 @@
class ForumObserver < ActiveRecord::Observer
- def after_create(forum)
- Thread.start do
- Mailer.forum_add(forum).deliver if Setting.notified_events.include?('forum_add')
- end
-
- end
+ # def after_create(forum)
+ # Thread.start do
+ # Mailer.forum_add(forum).deliver if Setting.notified_events.include?('forum_add')
+ # end
+ #
+ # end
end
diff --git a/app/models/journals_for_message.rb b/app/models/journals_for_message.rb
index c71fbaf47..39618e43b 100644
--- a/app/models/journals_for_message.rb
+++ b/app/models/journals_for_message.rb
@@ -22,8 +22,8 @@ class JournalsForMessage < ActiveRecord::Base
:foreign_key => 'jour_id',
:conditions => "#{self.table_name}.jour_type = 'Project' "
belongs_to :course,
- :foreign_key => 'jour_id',
- :conditions => "#{self.table_name}.jour_type = 'Course' "
+ :foreign_key => 'jour_id'
+
belongs_to :jour, :polymorphic => true
belongs_to :user
diff --git a/app/models/mailer.rb b/app/models/mailer.rb
index 2cae982a7..14c01b93d 100644
--- a/app/models/mailer.rb
+++ b/app/models/mailer.rb
@@ -27,21 +27,104 @@ class Mailer < ActionMailer::Base
{ :host => Setting.host_name, :protocol => Setting.protocol }
end
- # Send a mail when a new forum is created
- # example Mailer.forum(forum).deliver
- def forum_add(forum)
+ # author: alan
+ # Send an invitation mail to an unregistered user to join a project
+ # Function: automatically registers the user while adding them to the project
+ def send_invite_in_project(email, project, invitor)
+ @subject = "#{invitor.name} #{l(:label_invite_project)} #{project.name} "
+ password = newpass(6)
+ @project_url = url_for(:controller => 'projects', :action => 'show', :id => project.id,
+ :password => password, :login => email)
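+ # The generated password travels in the invite URL; ProjectsController#show (also in this patch)
+ # auto-registers the user with it on their first visit.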
+ mail :to => email, :subject => @subject
+ end
- redmine_headers 'Forum' => forum.id
- @forum = forum
- @author = forum.creator
- recipients = forum.creator.mail
- # cc = wiki_content.page.wiki.watcher_recipients - recipients
- @issue_author_url = url_for(user_activities_url(@author))
- @forum_url = url_for(:controller => 'forums', :action => 'show', :id => forum.id)
- mail :to => recipients,:subject => "[ #{l(:label_forum)} : #{forum.name} #{l(:notice_successful_create)}]"
+ # author: alan
+ # Send a personal daily or weekly digest according to the user's preference
+ # Contents: project (issues, boards, news), course (notices, messages, news), forums, personal messages
+ def send_for_user_activities(user, date_to, days)
+ date_from = date_to - days.days
+
+ # Generate a token so the links in the mail log the user in directly
+ @user = user
+ token = Token.new(:user =>user , :action => 'autologin')
+ token.save
+ @token = token
+
+ @user_url = url_for(my_account_url(user,:token => @token.value))
+ # Projects and courses the user participates in
+ projects = user.projects
+ courses = user.courses
+ project_ids = projects.map{|project| project.id}.join(",")
+ course_ids = courses.map {|course| course.id}.join(",")
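+ # note: if the user has no projects or courses these id lists are empty strings,
+ # and the raw "IN ()" SQL queries below would raise an error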
+
+ # The user's issues: authored, watched, and assigned
+ @issues = Issue.find_by_sql("select DISTINCT i.* from issues i, watchers w
+ where (i.assigned_to_id = #{user.id} or i.author_id = #{user.id}
+ or (w.watchable_type = 'Issue' and w.watchable_id = i.id and w.user_id = #{user.id}))
+ and (i.created_on between '#{date_from}' and '#{date_to}') order by i.created_on desc")
+
+ # @bids: course homework, i.e. assignments published by teachers and works submitted by the user
+ # @attachments: courseware updates in the user's courses
+ @attachments ||= []
+
+ @bids ||= [] # assignments published by teachers
+
+ unless courses.first.nil?
+ count = courses.count
+ count = count - 1
+ for i in 0..count do
+ bids = courses[i].homeworks.where("bids.created_on between '#{date_from}' and '#{date_to}'").order("bids.created_on desc")
+ attachments = courses[i].attachments.where("attachments.created_on between '#{date_from}' and '#{date_to}'").order('attachments.created_on DESC')
+ @bids += bids if bids.count > 0
+ @attachments += attachments if attachments.count > 0
+ end
+ end
+ # works submitted by the user
+ @homeworks = HomeworkAttach.where("user_id=#{user.id} and (created_at between '#{date_from}' and '#{date_to}')").order("created_at desc")
+
+ # Board messages the user posted in courses and projects
+ messages = Message.find_by_sql("select DISTINCT * from messages where author_id = #{user.id} and (created_on between '#{date_from}' and '#{date_to}') order by created_on desc")
+ @course_messages ||= []
+ @project_messages ||= []
+ unless messages.first.nil?
+ messages.each do |msg|
+ if msg.project
+ @project_messages << msg
+ elsif msg.course
+ @course_messages << msg
+ end
+ end
+ end
+ # Notices in the user's courses and news in the user's projects
+ @course_news = News.find_by_sql("select DISTINCT n.* from news n
+ where n.course_id in (#{course_ids})
+ and (created_on between '#{date_from}' and '#{date_to}') order by created_on desc")
+ @project_news = News.find_by_sql("select DISTINCT n.* from news n where n.project_id in (#{project_ids})
+ and (created_on between '#{date_from}' and '#{date_to}') order by created_on desc")
+
+ # Messages the user left on courses and on personal walls
+ @course_journal_messages = JournalsForMessage.find_by_sql("select DISTINCT * from journals_for_messages where
+ jour_type='Course' and user_id = #{user.id}
+ and (created_on between '#{date_from}' and '#{date_to}') order by created_on desc")
+ @user_journal_messages = user.journals_for_messages.where("m_parent_id IS NULL and (created_on between '#{date_from}' and '#{date_to}')").order('created_on DESC')
+
+
+ # Forums the user created and memos they posted
+ @forums = Forum.find_by_sql("select DISTINCT * from forums where creator_id = #{user.id} and (created_at between '#{date_from}' and '#{date_to}') order by created_at desc")
+ @memos = Memo.find_by_sql("select DISTINCT m.* from memos m, forums f where (m.author_id = #{user.id} or (m.forum_id = f.id and f.creator_id = #{user.id}))
+ and (m.created_at between '#{date_from}' and '#{date_to}') order by m.created_at desc")
+ if days == 1
+ subject = "[ #{user.show_name} : #{l(:label_day_mail)}]"
+ @subject = " #{user.show_name} : #{date_to - 1.days} #{l(:label_day_mail)}"
+ else
+ subject = "[ #{user.show_name} : #{l(:label_week_mail)}]"
+ @subject = "#{user.show_name} : #{l(:label_week_mail)}"
+ end
+ mail :to => user.mail,:subject => subject
end
+
def forum_message_added(memo)
@memo = memo
redmine_headers 'Memo' => memo.id
@@ -50,15 +133,12 @@ class Mailer < ActionMailer::Base
@forum_url = url_for(:controller => 'forums', :action => 'show', :id => @forum.id)
@issue_author_url = url_for(user_activities_url(@author))
recipients ||= []
- # if !memo.parent_id.nil?
- # mems = memo.self_and_siblings
- # mems.each do |mem|
- # recipients << mem.author.mail unless recipients.include? mem.author.mail
- # end
- # else
- # recipients << memo.author.mail
- # end
- recipients << @author.mail
+ if @forum.creator.mail_notification != 'day' && @forum.creator.mail_notification != 'week'
+ recipients << @forum.creator.mail
+ end
+ if @author.mail_notification != 'day' && @author.mail_notification != 'week'
+ recipients << @author.mail
+ end
# cc = wiki_content.page.wiki.watcher_recipients - recipients
@memo_url = url_for(forum_memo_url(@forum, (@memo.parent_id.nil? ? @memo : @memo.parent_id)))
@@ -93,6 +173,17 @@ class Mailer < ActionMailer::Base
Rails.logger.error "[Builds a Mail::Message ERROR] journalsForMessage's jour is unkown type, journalsForMessage.id = #{journals_for_message.id}"
return -1
end
+
+ # Respect each recipient's mail notification preference (users on daily/weekly digests are skipped here)
+ recipients ||= []
+ recipients1 ||= []
+ if @mail.mail_notification != 'week' && @mail.mail_notification != 'day'
+ recipients1 = @mail.mail
+ end
+ if journals_for_message.jour.author.mail_notification != 'week' && journals_for_message.jour.author.mail_notification != 'day'
+ recipients = journals_for_message.jour.author.mail
+ end
+
# modify by nwb
#If this is a direct message and its target is a course
if !journals_for_message.at_user && journals_for_message.jour.class.to_s.to_sym == :Course
@@ -103,7 +194,9 @@ class Mailer < ActionMailer::Base
#Recipient email addresses
@recipients ||= []
@members.each do |teacher|
+ if teacher.user.mail_notification != 'week' && teacher.user.mail_notification != 'day'
@recipients << teacher.user.mail
+ end
end
mail :to => @recipients,
:subject => "#{l(:label_your_course)}#{journals_for_message.jour.name}#{l(:label_have_message)} "
@@ -111,14 +204,15 @@ class Mailer < ActionMailer::Base
if !journals_for_message.jour.author.notify_about? journals_for_message
return -1
end
- mail :to => journals_for_message.jour.author.mail, :subject => @title
+
+ mail :to => recipients, :subject => @title
elsif journals_for_message.jour.class.to_s.to_sym == :Contest
if !journals_for_message.jour.author.notify_about? journals_for_message
return -1
end
- mail :to => journals_for_message.jour.author.mail, :subject => @title
+ mail :to => recipients, :subject => @title
else
- mail :to => @mail.mail, :subject => @title
+ mail :to => recipients1, :subject => @title
end
@@ -140,7 +234,6 @@ class Mailer < ActionMailer::Base
@author = issue.author
@issue = issue
user = User.find_by_mail(recipients)
-
token = Token.new(:user =>user , :action => 'autologin')
token.save
@token = token
@@ -196,12 +289,6 @@ class Mailer < ActionMailer::Base
@user_url = url_for(my_account_url(user,:token => @token.value))
@issue_url = url_for(:controller => 'issues', :action => 'show', :id => issue.id, :anchor => "change-#{journal.id}", :token => @token.value)
-
-
-
-
-
-
s = "[#{issue.project.name} - #{issue.tracker.name} ##{issue_id}] "
s << "(#{issue.status.name}) " if journal.new_value_for('status_id')
s << issue.subject
@@ -752,4 +839,13 @@ class Mailer < ActionMailer::Base
end
end
end
+
+ # author: alan
+ # Function: generate a random string of len characters
+ def newpass(len)
+ chars = ("a".."z").to_a + ("A".."Z").to_a + ("0".."9").to_a
+ newpass = ""
+ 1.upto(len) { |i| newpass << chars[rand(chars.size)] }
+ return newpass
+ end
end
diff --git a/app/models/user.rb b/app/models/user.rb
index 06f59c764..83e08254c 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -24,7 +24,7 @@ class User < Principal
DEVELOPER = 3
include Redmine::SafeAttributes
-
+ seems_rateable_rater
# Different ways of displaying/sorting users
USER_FORMATS = {
:firstname_lastname => {
@@ -66,10 +66,8 @@ class User < Principal
MAIL_NOTIFICATION_OPTIONS = [
['all', :label_user_mail_option_all],
- ['selected', :label_user_mail_option_selected],
- ['only_my_events', :label_user_mail_option_only_my_events],
- ['only_assigned', :label_user_mail_option_only_assigned],
- ['only_owner', :label_user_mail_option_only_owner],
+ ['week', :label_user_mail_option_week],
+ ['day', :label_user_mail_option_day],
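+ # 'week' / 'day' select the weekly or daily digest sent by Mailer#send_for_user_activities;
+ # instant notification mails skip users with these settings (see Mailer in this patch)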
['none', :label_user_mail_option_none]
]
@@ -149,7 +147,7 @@ class User < Principal
scope :by_join_date, order("created_on DESC")
############################# added by liuping (watch/follow)
acts_as_watchable
- seems_rateable_rater
+
has_one :user_extensions,:dependent => :destroy
## end
@@ -188,7 +186,7 @@ class User < Principal
validates_confirmation_of :password, :allow_nil => true
validates_inclusion_of :mail_notification, :in => MAIL_NOTIFICATION_OPTIONS.collect(&:first), :allow_blank => true
validate :validate_password_length
- #validates_email_realness_of :mail
+ # validates_email_realness_of :mail
before_create :set_mail_notification
before_save :update_hashed_password
before_destroy :remove_references_before_destroy
diff --git a/app/services/courses_service.rb b/app/services/courses_service.rb
index 0304722de..22b7f8cd3 100644
--- a/app/services/courses_service.rb
+++ b/app/services/courses_service.rb
@@ -3,7 +3,7 @@ class CoursesService
include CoursesHelper
include HomeworkAttachHelper
include ApiHelper
- #TODO: permission system not yet integrated
+
#Returns all courses when school_id is 0 or absent, otherwise the courses of the given school
#per_page_count: number of courses shown per page
#page: current page number
@@ -211,7 +211,7 @@ class CoursesService
@course.members << m
@course.course_infos << course
end
- @course
+ {:course => @course,:img_url => url_to_avatar(@course),:current_user_is_member => current_user.member_of_course?(@course),:current_user_is_teacher => is_course_teacher(current_user,@course)}
end
#Check permission to edit the course
@@ -248,7 +248,7 @@ class CoursesService
course_status = CourseStatus.create(:course_id => course.id, :grade => 0)
end
end
- course
+ {:course => course,:img_url => url_to_avatar(course),:current_user_is_member => current_user.member_of_course?(course),:current_user_is_teacher => is_course_teacher(current_user,course)}
end
#Quit the course
@@ -333,24 +333,44 @@ class CoursesService
end
def course_dynamic(params,current_user)
- course = Course.find(params[:id])
- if current_user.nil? || !(current_user.admin? || course.is_public == 1 || (course.is_public == 0 && current_user.member_of_course?(course)))
- raise '403'
+
+ @user = User.find(params[:id])
+ if !current_user.admin? && !@user.active?
+ raise '404'
+ return
end
- count,is_teacher = get_course_anonymous_evaluation current_user,course
- if is_teacher
- student_commit_number = count
+ if current_user == @user || current_user.admin?
+ membership = @user.coursememberships.all
else
- need_anonymous_comments_count = count
+ membership = @user.coursememberships.all(:conditions => Course.visible_condition(current_user))
end
- news_count = course.news.count
- message_count = course.journals_for_messages.count
- {:course_name => course.name,:need_anonymous_comments_count=>need_anonymous_comments_count,:student_commit_number=>student_commit_number,:news_count=> news_count,:message_count=>message_count}
+ membership.sort! {|older, newer| newer.created_on <=> older.created_on }
+ result = []
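+ # Four dynamic entries are built per visible course: type 1 = course messages, type 2 = homeworks
+ # awaiting the user's anonymous comments, type 3 = student submissions (teacher view),
+ # type 4 = course news; :count carries the figure for each type.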
+ membership.each do |mp|
+ course = mp.course
+ unless current_user.nil? || !(current_user.admin? || course.is_public == 1 || (course.is_public == 0 && current_user.member_of_course?(course)))
+ count,is_teacher = get_course_anonymous_evaluation current_user,course
+ if is_teacher
+ student_commit_number = count
+ else
+ need_anonymous_comments_count = count
+ end
+ news_count = course.news.count
+ message_count = course.journals_for_messages.count
+
+ result << {:course_name => course.name,:course_id => course.id,:course_img_url => url_to_avatar(course),:type => 1,:count => message_count}
+ result << {:course_name => course.name,:course_id => course.id,:course_img_url => url_to_avatar(course),:type => 2,:count => need_anonymous_comments_count}
+ result << {:course_name => course.name,:course_id => course.id,:course_img_url => url_to_avatar(course),:type => 3,:count => student_commit_number}
+ result << {:course_name => course.name,:course_id => course.id,:course_img_url => url_to_avatar(course),:type => 4,:count => news_count}
+ #{:course_name => course.name,:need_anonymous_comments_count=>need_anonymous_comments_count,:student_commit_number=>student_commit_number,:news_count=> news_count,:message_count=>message_count}
+ end
+ end
+ result
end
private
def show_homework_info course,bid,current_user,is_course_teacher
- author = bid.author.lastname + bid.author.firstname
+ author_real_name = bid.author.lastname + bid.author.firstname
many_times = course.homeworks.index(bid) + 1
name = bid.name
homework_count = bid.homeworks.count #number of submitted works
@@ -363,8 +383,9 @@ class CoursesService
end
#end
open_anonymous_evaluation = bid.open_anonymous_evaluation
- {:course_name => course.name,:id => bid.id, :course_teacher => author, :homework_times => many_times, :homework_name => name, :homework_count => homework_count,:student_questions_count => student_questions_count,
- :description => description, :homework_state => state,:open_anonymous_evaluation => open_anonymous_evaluation,:homework_for_anonymous_comments => homework_for_anonymous_comments}
+ {:course_name => course.name,:course_id => course.id,:id => bid.id, :author => bid.author,:author_real_name => author_real_name, :homework_times => many_times, :homework_name => name, :homework_count => homework_count,:student_questions_count => student_questions_count,
+ :description => description, :homework_state => state,:open_anonymous_evaluation => open_anonymous_evaluation,:homework_for_anonymous_comments => homework_for_anonymous_comments,:created_on => bid.created_on,:deadline => bid.deadline}
+
end
#Show the homework list together with the works assigned to the current student for anonymous evaluation
diff --git a/app/services/homework_service.rb b/app/services/homework_service.rb
index b468e1110..6287eeef5 100644
--- a/app/services/homework_service.rb
+++ b/app/services/homework_service.rb
@@ -6,6 +6,7 @@ class HomeworkService
include WordsHelper
include ApiHelper
include HomeworkAttachHelper
+ include CoursesHelper
# Homework details (the "start anonymous evaluation" action is shown to teachers only, not students)
# many_times: which assignment this is
@@ -25,8 +26,8 @@ class HomeworkService
state = @bid.comment_status
#end
open_anonymous_evaluation = @bid.open_anonymous_evaluation
- {:course_name => course.name,:id => @bid.id, :course_teacher => author, :homework_times => many_times, :homework_name => name, :homework_count => homework_count,:student_questions_count => student_questions_count,
- :description => description, :homework_state => state,:open_anonymous_evaluation => open_anonymous_evaluation}
+ {:course_name => course.name,:course_id => course.id,:id => @bid.id, :author => @bid.author,:author_real_name =>author, :homework_times => many_times, :homework_name => name, :homework_count => homework_count,:student_questions_count => student_questions_count,
+ :description => description, :homework_state => state,:open_anonymous_evaluation => open_anonymous_evaluation,:created_on => @bid.created_on,:deadline => @bid.deadline}
end
# Prompt shown before starting anonymous evaluation of a homework
@@ -273,10 +274,26 @@ class HomeworkService
membership.each do |mp|
my_homeworks = []
mp.course.homeworks.each do |bid|
- hw = bid.homeworks.where("user_id = #{current_user.id}")
- my_homeworks << hw[0] unless (hw.nil? || hw[0].nil?)
+ #hw = bid.homeworks.where("user_id = #{current_user.id}")
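+ # Build, for every assignment in the course, the same hash CoursesService#show_homework_info returns
+ # (no longer limited to assignments the current user has submitted work for).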
+ @bid = bid
+ course = @bid.courses.first
+ author = @bid.author.lastname + @bid.author.firstname
+ many_times = course.homeworks.index(@bid) + 1
+ name = @bid.name
+ homework_count = @bid.homeworks.count #number of submitted works
+ student_questions_count = @bid.commit.nil? ? 0 : @bid.commit
+ description = @bid.description
+ #if is_course_teacher(User.current, course) && @bid.open_anonymous_evaluation == 1 && @bid.homeworks.count >= 2
+ state = @bid.comment_status
+ #end
+ open_anonymous_evaluation = @bid.open_anonymous_evaluation
+
+ my_homeworks << {:course_name => course.name,:id => @bid.id, :author => @bid.author,:author_real_name =>author, :homework_times => many_times, :homework_name => name, :homework_count => homework_count,:student_questions_count => student_questions_count,
+ :description => description, :homework_state => state,:open_anonymous_evaluation => open_anonymous_evaluation}
+ end
+ if mp.course.homeworks.count != 0
+ course_list << {:course => mp.course,:img_url => url_to_avatar(mp.course),:my_homework => my_homeworks,:current_user_is_member => current_user.member_of_course?(mp.course),:current_user_is_teacher => is_course_teacher(current_user,mp.course)}
end
- course_list << {:course => mp.course,:img_url => url_to_avatar(mp.course),:my_homework => my_homeworks}
end
course_list
end
diff --git a/app/services/users_service.rb b/app/services/users_service.rb
index 8cca137ee..8cadf031c 100644
--- a/app/services/users_service.rb
+++ b/app/services/users_service.rb
@@ -44,7 +44,31 @@ class UsersService
#location = get_user_location @user
#{:id => @user.id, :img_url => img_url, :nickname => @user.login, :gender => gender, :work_unit => work_unit, :mail => @user.mail, :location => location, :brief_introduction => @user.user_extensions.brief_introduction}
end
+ def register_auto(login,mail,password)
+ @user = User.new
+ @user.admin = false
+ @user.register
+ @user.login = login
+ @user.mail =mail
+ password_confirmation = password
+ should_confirmation_password = true
+ if !password.blank? && !password_confirmation.blank? && should_confirmation_password
+ @user.password, @user.password_confirmation = password, password_confirmation
+ elsif !password.blank? && !should_confirmation_password
+ @user.password = password
+ else
+ @user.password = ""
+ end
+ @user = automatically_register(@user)
+
+ if @user.id != nil
+ ue = @user.user_extensions ||= UserExtensions.new
+ ue.user_id = @user.id
+ ue.save
+ end
+ @user
+ end
#Show a user
#id: the user id
def show_user(params)
diff --git a/app/views/layouts/base_projects.html.erb b/app/views/layouts/base_projects.html.erb
index b8cc2ddc9..d33a33a14 100644
--- a/app/views/layouts/base_projects.html.erb
+++ b/app/views/layouts/base_projects.html.erb
@@ -134,7 +134,7 @@
- <%=link_to "#{@project.issues.where('status_id in (1,2,4,6)').count}", project_issues_path(@project) %>
+ <%=link_to "#{@project.issues.where('status_id in (1,2,4,6)').count}/#{@project.issues.count}", project_issues_path(@project) %>
|
diff --git a/app/views/mailer/send_for_user_activities.html.erb b/app/views/mailer/send_for_user_activities.html.erb
new file mode 100644
index 000000000..1735bd899
--- /dev/null
+++ b/app/views/mailer/send_for_user_activities.html.erb
@@ -0,0 +1,373 @@
+
+
+
+
+
+
+
+<%= @subject %>
+
+<% if @attachments.first || @course_news.first || @bids.first ||
+ @homeworks.first || @course_journal_messages.first|| @course_messages.first %>
+
+
+ <%= l(:label_course_overview)%>
+ <% unless @course_news.first.nil? %>
+
+
+
+ <%= l(:label_course_news) %>
+ (<%= @course_news.count %>)
+
+
+ <% @course_news.each do |course_new|%>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(course_new.course.name,length: 30,omission: '...'), course_url(course_new.course, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to course_new.author, user_activities_url(course_new.author,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_project_notice) %>
+
+ <%= link_to truncate(course_new.title,length: 30,omission: '...'), news_url(course_new,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(course_new.created_on) %>
+
+ <% end %>
+
+
+
+ <% end %>
+ <% if !@bids.first.nil? || !@homeworks.first.nil? %>
+
+
+ <%= l(:label_homework_overview) %>(<%= @bids.count %>)
+ <% unless @bids.first.nil?%>
+ <% @bids.each do |bid| %>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(bid.courses.first.name,length: 30,omission: '...'), course_url(bid.courses.first, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to bid.author, user_activities_url(bid.author,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_course_homework) %>
+
+ <%= link_to truncate(bid.name,length: 30,omission: '...'), course_for_bid_url(:id => bid.id,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(bid.created_on) %>
+
+ <% end %>
+ <% end %>
+ <% unless @homeworks.first.nil? %>
+ <% @homeworks.each do |homework| %>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(homework.bid.courses.first.name,length: 30,omission: '...'), course_url(homework.bid.courses.first, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to homework.user, user_activities_url(homework.user,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_course_submit_homework) %>
+
+ <%= link_to truncate(homework.name,length: 30,omission: '...'), course_for_bid_url(:id => homework.bid.id,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(homework.created_on) %>
+
+ <% end %>
+ <% end %>
+
+
+
+ <% end %>
+
+ <% unless @course_journal_messages.first.nil? %>
+
+
+
+ <%= l(:view_course_journals_for_messages) %>
+ (<%= @course_journal_messages.count %>)
+
+
+ <% @course_journal_messages.each do |course_journal_message|%>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(course_journal_message.course.name,length: 30,omission: '...'), course_url(course_journal_message.course, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to course_journal_message.user, user_activities_url(course_journal_message.user,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_send_course_journals_for_messages) %>
+
+ <%= link_to truncate(course_journal_message.notes,length: 30,omission: '...'), course_feedback_url(course_journal_message.course,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(course_journal_message.created_on) %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+ <% unless @course_messages.first.nil? %>
+
+
+
+ <%= l(:view_borad_course) %>
+ (<%= @course_messages.count %>)
+
+
+ <% @course_messages.each do |course_message|%>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(course_message.course.name,length: 30,omission: '...'), course_url(course_message.course.id, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to course_message.author, user_activities_url(course_message.author,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_send_course_messages) %>
+
+ <%= link_to truncate(course_message.subject,length: 30,omission: '...'),board_message_url(course_message, :board_id => course_message.board_id,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(course_message.created_on) %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+ <% unless @attachments.first.nil? %>
+
+
+ <%= l(:label_course_attendingcontestwork_download) %>
+ (<%= @attachments.count %>)
+
+
+ <% @attachments.each do |attachment|%>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(attachment.course.name,length: 30,omission: '...'), course_url(attachment.course, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to attachment.author, user_activities_url(attachment.author,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_course_file_upload) %>
+
+ <%= link_to truncate(attachment.filename,length: 30,omission: '...'),course_files_url(attachment.course,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(attachment.created_on) %>
+
+ <% end %>
+
+
+ <% end %>
+
+ <% end %>
+ <% if @issues.first || @project_messages.first %>
+
+
+ <%= l(:label_project_overview_new)%>
+ <% unless @issues.first.nil? %>
+
+
+ <%= l(:label_issue_tracking) %>
+ (<%= @issues.count %>)
+
+
+ <% @issues.each do |issue|%>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(issue.project.name,length: 30,omission: '...'), project_url(issue.project, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to issue.author, user_activities_url(issue.author,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_project_issue) %>
+
+ <%= link_to truncate(issue.subject,length: 30,omission: '...'),issue_url(issue, :token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(issue.created_on) %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+ <% unless @project_messages.first.nil? %>
+
+
+ <%= l(:project_moule_boards_show) %>
+ (<%= @project_messages.count %>)
+
+
+ <% @project_messages.each do |project_message|%>
+ -
+ ▪
+ [
+
+ <%= link_to truncate(project_message.project.name,length: 30,omission: '...'), project_url(project_message.project, :token => @token.value),
+ :class=> "wmail_column",
+ :style=> " font-weight: bold; display:block; float:left; color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ ]
+
+ <%= link_to project_message.author, user_activities_url(project_message.author,:token => @token.value), :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"%>
+ <%= l(:label_send_course_messages) %>
+
+ <%= link_to truncate(project_message.subject,length: 30,omission: '...'),board_message_url(project_message, :board_id => project_message.board_id,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(project_message.created_on) %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+
+
+
+ <% end %>
+ <% unless @user_journal_messages.first.nil? %>
+
+
+ <%= l(:label_activities) %>
+
+
+
+ <%= l(:label_user_message) %>
+ (<%= @user_journal_messages.count %>)
+
+
+ <% @user_journal_messages.each do |user_journal_message|%>
+ -
+ ▪
+
+ <%= link_to user_journal_message.user, user_activities_url(user_journal_message.user,:token => @token.value),
+ :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ <%= l(:label_show_your_message) %>
+
+ <%= link_to truncate(user_journal_message.notes,length: 30,omission: '...'),feedback_url(@user,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(user_journal_message.created_on) %>
+
+ <% end %>
+
+
+
+
+
+ <% end %>
+ <% if @forums.first || @memos.first %>
+
+
+ <%= l(:lable_bar_active) %>
+ <% unless @forums.first.nil? %>
+
+
+ <%= l(:label_user_forum) %>
+ (<%= @forums.count %>)
+
+
+ <% @forums.each do |forum|%>
+ -
+ ▪
+
+ <%= link_to forum.creator, user_activities_url(forum.creator,:token => @token.value),
+ :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ <%= l(:label_forum_new) %>
+
+ <%= link_to truncate(forum.name,length: 30,omission: '...'),forum_url(forum,:token => @token.value),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(forum.created_at) %>
+
+ <% end %>
+
+
+
+ <% end %>
+ <% unless @memos.first.nil? %>
+
+
+ <%= l(:label_user_message_forum) %>
+ (<%= @memos.count %>)
+
+
+ <% @memos.each do |memo|%>
+ -
+ ▪
+
+ <%= link_to memo.author, user_activities_url(memo.author,:token => @token.value),
+ :class => "wmail_name",
+ :style => "color:#fe5722; float:left;display:block; margin-right:5px; margin-left:5px; overflow:hidden; white-space: nowrap; text-overflow:ellipsis;" %>
+ <%= memo.parent_id.nil? ? l(:label_memo_new_from_forum) : l(:label_reply) %>
+
+ <%= link_to truncate(memo.subject,length: 30,omission: '...'),forum_memo_url(memo.forum, (memo.parent_id.nil? ? memo : memo.parent_id)),
+ :class => 'wmail_info',
+ :style => "color:#5a5a5a; float:left; margin-right:5px; display:block;color:#1b55a7;overflow:hidden; white-space: nowrap; text-overflow:ellipsis;"
+ %>
+ <%= format_time(memo.created_at) %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+<% end %>
+
+
+
+
+
diff --git a/app/views/mailer/send_for_user_activities.text.erb b/app/views/mailer/send_for_user_activities.text.erb
new file mode 100644
index 000000000..4de42b8aa
--- /dev/null
+++ b/app/views/mailer/send_for_user_activities.text.erb
@@ -0,0 +1,296 @@
+<%= @subject %>
+<% if @attachments.first || @course_news.first || @bids.first ||
+ @homeworks.first || @course_journal_messages.first|| @course_messages.first %>
+<%= l(:label_course_overview)%>
+ <% unless @course_news.first.nil? %>
+ <%= l(:label_course_news) %>
+ (<%= @course_news.count %>)
+
+
+ <% @course_news.each do |course_new|%>
+
+ ▪
+ [
+
+ <%= link_to truncate(course_new.course.name,length: 30,omission: '...'), course_url(course_new.course, :token => @token.value)%>
+ ]
+
+ <%= link_to course_new.author, user_activities_url(course_new.author,:token => @token.value)
+ %>
+ <%= l(:label_project_notice) %>
+
+ <%= link_to truncate(course_new.title,length: 30,omission: '...'), news_url(course_new,:token => @token.value)
+
+
+ %> <%= format_time(course_new.created_on) %>
+
+ <% end %>
+
+ <% end %>
+ <% if !@bids.first.nil? || !@homeworks.first.nil? %>
+ <%= l(:label_homework_overview) %><%= @bids.count %>
+ <% unless @bids.first.nil?%>
+ <% @bids.each do |bid| %>
+ ▪
+ [
+
+ <%= link_to truncate(bid.courses.first.name,length: 30,omission: '...'),course_url(bid.courses.first, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to bid.author, user_activities_url(bid.author,:token => @token.value)
+ %>
+ <%= l(:label_course_homework) %>
+
+ <%= link_to truncate(bid.name,length: 30,omission: '...'), course_for_bid_url(bid,:token => @token.value)
+
+
+ %>
+ <%= format_time(bid.created_on) %>
+
+ <% end %>
+ <% end %>
+ <% unless @homeworks.first.nil? %>
+ <% @homeworks.each do |homework| %>
+ ▪[
+
+ <%= link_to truncate(homework.bid.courses.first.name,length: 30,omission: '...'), course_url(homework.bid.courses.first, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to homework.user, user_activities_url(homework.user,:token => @token.value)
+ %>
+ <%= l(:label_course_submit_homework) %>
+
+ <%= link_to truncate(homework.name,length: 30,omission: '...'), course_for_bid_url(homework.bid,:token => @token.value)
+
+
+ %>
+ <%= format_time(homework.created_at) %>
+ <% end %>
+ <% end %>
+
+
+
+ <% end %>
+
+ <% unless @course_journal_messages.first.nil? %>
+
+ <%= l(:view_course_journals_for_messages) %> (<%= @course_journal_messages.count %>)
+
+
+ <% @course_journal_messages.each do |course_journal_message|%>
+
+ [
+
+ <%= link_to truncate(course_journal_message.course.name,length: 30,omission: '...'), course_url(course_journal_message.course, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to course_journal_message.user, user_activities_url(course_journal_message.user,:token => @token.value)
+ %>
+ <%= l(:label_send_course_journals_for_messages) %>
+
+ <%= link_to truncate(course_journal_message.notes,length: 30,omission: '...'), course_feedback_url(course_journal_message.course,:token => @token.value)
+
+
+ %>
+ <%= format_time(course_journal_message.created_on) %>
+
+ <% end %>
+
+
+ <% end %>
+
+ <% unless @course_messages.first.nil? %>
+
+ <%= l(:view_borad_course) %>
+ (<%= @course_messages.count %>)
+
+
+ <% @course_messages.each do |course_message|%>
+
+ ▪
+ [
+
+ <%= link_to truncate(course_message.course.name,length: 30,omission: '...'), course_url(course_message.course, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to course_message.author, user_activities_url(course_message.author,:token => @token.value)
+ %>
+ <%= l(:label_send_course_messages) %>
+
+ <%= link_to truncate(course_message.subject,length: 30,omission: '...'),board_message_url(course_message, :board_id => course_message.board_id,:token => @token.value)
+
+
+ %>
+ <%= format_time(course_message.created_on) %>
+
+ <% end %>
+
+
+ <% end %>
+
+ <% unless @attachments.first.nil? %>
+
+ <%= l(:label_course_attendingcontestwork_download) %>
+ (<%= @attachments.count %>)
+
+
+ <% @attachments.each do |attachment|%>
+ ▪[
+
+ <%= link_to truncate(attachment.course.name,length: 30,omission: '...'), course_url(attachment.course, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to attachment.author, user_activities_url(attachment.author,:token => @token.value)
+ %>
+ <%= l(:label_course_file_upload) %>
+
+ <%= link_to truncate(attachment.filename,length: 30,omission: '...'),course_files_url(attachment.course,:token => @token.value)
+
+
+ %>
+ <%= format_time(attachment.created_on) %>
+
+ <% end %>
+
+
+ <% end %>
+<% end %>
+
+<% @issues.first || @project_messages.first %>
+<%= l(:label_project_overview_new)%>
+ <% unless @issues.first.nil? %>
+
+ <%= l(:label_issue_tracking) %>
+ (<%= @issues.count %>)
+
+ <% @issues.each do |issue|%>
+ ▪
+ [
+
+ <%= link_to truncate(issue.project.name,length: 30,omission: '...'), project_url(issue.project, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to issue.author, user_activities_url(issue.author,:token => @token.value)
+ %>
+ <%= l(:label_project_issue) %>
+
+ <%= link_to truncate(issue.subject,length: 30,omission: '...'),issue_url(issue, :token => @token.value)
+
+
+ %>
+ <%= format_time(issue.created_on) %>
+ <% end %>
+
+
+ <% end %>
+
+ <% unless @project_messages.first.nil? %>
+
+ <%= l(:project_moule_boards_show) %>
+ (<%= @project_messages.count %>)
+
+ <% @project_messages.each do |project_message|%>
+ ▪[
+
+ <%= link_to truncate(project_message.project.name,length: 30,omission: '...'), project_url(project_message.project, :token => @token.value)
+
+ %>
+ ]
+
+ <%= link_to project_message.author, board_message_url(project_message, :board_id => project_message.board_id,:token => @token.value)
+ %>
+ <%= l(:label_send_course_messages) %>
+
+ <%= link_to truncate(project_message.subject,length: 30,omission: '...'),board_message_url(project_message, :board_id => project_message.board_id,:token => @token.value)
+
+
+ %>
+ <%= format_time(project_message.created_on) %>
+ <% end %>
+
+
+ <% end %>
+
+
+<% unless @user_journal_messages.first.nil? %>
+<%= l(:label_activities) %>
+
+
+ <%= l(:label_user_message) %>
+ (<%= @user_journal_messages.count %>)
+
+ <% @user_journal_messages.each do |user_journal_message|%>
+ ▪
+
+ <%= link_to user_journal_message.user, user_activities_url(user_journal_message.user,:token => @token.value)
+
+ %>
+ <%= l(:label_show_your_message) %>
+
+ <%= link_to truncate(user_journal_message.notes,length: 30,omission: '...'), feedback_url(@user,:token => @token.value)
+
+
+ %>
+ <%= format_time(user_journal_message.created_on) %>
+
+ <% end %>
+
+
+<% end %>
+<% if @forums.first || @memos.first %>
+ <%= l(:lable_bar_active) %>
+ <% unless @forums.first.nil? %>
+
+ <%= l(:label_user_forum) %>
+ (<%= @forums.count %>)
+
+ <% @forums.each do |forum|%>
+ ▪
+
+ <%= link_to forum.creator, user_activities_url(forum.creator,:token => @token.value) %>
+ <%= l(:label_forum_new) %>
+
+ <%= link_to truncate(forum.name,length: 30,omission: '...'),forum_url(forum,:token => @token.value)
+ %>
+ <%= format_time(forum.created_at) %>
+
+ <% end %>
+
+
+ <% end %>
+ <% unless @memos.first.nil? %>
+
+ <%= l(:label_user_message_forum) %>
+ (<%= @memos.count %>)
+
+ <% @memos.each do |memo|%>
+ ▪
+
+ <%= link_to memo.author, user_activities_url(memo.author,:token => @token.value)%>
+ <%= memo.parent_id.nil? ? l(:label_memo_new_from_forum) : l(:label_reply) %>
+
+ <%= link_to truncate(memo.subject,length: 30,omission: '...'),forum_memo_url(memo.forum, (memo.parent_id.nil? ? memo : memo.parent_id))
+ %>
+ <%= format_time(memo.created_at) %>
+
+ <% end %>
+
+
+
+ <% end %>
+
+<% end %>
+
+ <%= link_to l(:mail_footer), @user_url, :style => "margin-top:20px;color:#2775d2; margin-left:10px;" %>
diff --git a/app/views/mailer/send_invite_in_project.html.erb b/app/views/mailer/send_invite_in_project.html.erb
new file mode 100644
index 000000000..980adc12d
--- /dev/null
+++ b/app/views/mailer/send_invite_in_project.html.erb
@@ -0,0 +1,17 @@
+
+
+
+
+ - <%= l(:mail_issue_content)%>
+
+
+ <%= @subject %>
+ <%= link_to @project_url, @project_url%>
+
+
+
+
+
+
+
+
+
diff --git a/app/views/mailer/send_invite_in_project.text.erb b/app/views/mailer/send_invite_in_project.text.erb
new file mode 100644
index 000000000..6ee038055
--- /dev/null
+++ b/app/views/mailer/send_invite_in_project.text.erb
@@ -0,0 +1,2 @@
+<%= @subject %>
+<%= link_to @project_url, @project_url%>
\ No newline at end of file
diff --git a/app/views/users/user_watchlist.html.erb b/app/views/users/user_watchlist.html.erb
index e58e35d27..b6e889b91 100644
--- a/app/views/users/user_watchlist.html.erb
+++ b/app/views/users/user_watchlist.html.erb
@@ -1,47 +1,63 @@
<%= l(:label_user_watcher)%>
<% for user in User.watched_by(@user.id) %>
-
-
-
-
- <%= link_to image_tag(url_to_avatar(user), :class => "avatar"), user_path(user), :title => "#{user.name}" %> |
-
-
- <%= content_tag "div", link_to(user.name,user_path(user)), :class => "project_avatar_name" %>
- |
-
-
-
- <%# unless user.memberships.empty? %>
- <% cond = Project.visible_condition(User.current) + " AND projects.project_type <> 1" %>
- <% memberships = user.memberships.all(:conditions => cond) %>
- <%= l(:label_x_contribute_to, :count => memberships.count) %>
- <% for member in memberships %>
- <%= link_to_project(member.project) %><%= (user.memberships.last == member) ? '' : ',' %>
- <% end %>
- <%# end %>
-
-
- <%# unless user.memberships.empty? %>
- <% user_courses = user_courses_list(user) %>
- <%= l(:label_x_course_contribute_to, :count => user_courses.count) %>
- <% for course in user_courses %>
- <%# if course.name != nil %>
- <%= link_to course.name,{:controller => 'courses',:action => 'show',id:course.id, host: Setting.course_domain} %><%= (user_courses.last == course) ? '' : ',' %>
- <%# end %>
- <% end %>
- <%# end %>
-
- |
-
-
-
-
- <%= l(:label_user_joinin) %><%= format_date(user.created_on) %>
- |
-
- |
-
-
+
+ -
+
+
+
+ <%= link_to image_tag(url_to_avatar(user), :class => "avatar"), user_path(user), :title => "#{user.name}" %>
+ |
+
+
+
+
+
+ <%= content_tag "div", link_to(user.name,user_path(user)), :class => "project_avatar_name" %>
+
+ |
+
+
+
+
+ <%# unless user.memberships.empty? %>
+ <% cond = Project.visible_condition(User.current) + " AND projects.project_type <> 1" %>
+ <% memberships = user.memberships.all(:conditions => cond) %>
+ <%= l(:label_x_contribute_to, :count => memberships.count) %>
+ <%= ":" unless memberships.empty? %>
+ <% for member in memberships %>
+ <%= link_to_project(member.project) %>
+ <%= (user.memberships.last == member) ? '' : ',' %>
+ <% end %>
+ <%# end %>
+
+
+ <%# unless user.memberships.empty? %>
+ <% user_courses = user_courses_list(user) %>
+ <%= l(:label_x_course_contribute_to, :count => user_courses.count) %>
+ <%= ":" unless user_courses.empty? %>
+ <% for course in user_courses %>
+ <%# if course.name != nil %>
+ <%= link_to course.name,{:controller => 'courses',:action => 'show',id:course.id, host: Setting.course_domain} %><%= (user_courses.last == course) ? '' : ',' %>
+ <%# end %>
+ <% end %>
+ <%# end %>
+
+ |
+
+
+
+
+
+ <%= l(:label_user_joinin) %>
+ <%= format_date(user.created_on) %>
+ |
+
+
+ |
+
+
+
+
<% end %>
\ No newline at end of file
diff --git a/app/views/welcome/course.html.erb b/app/views/welcome/course.html.erb
index d427a9aea..95837cb6d 100644
--- a/app/views/welcome/course.html.erb
+++ b/app/views/welcome/course.html.erb
@@ -96,11 +96,11 @@
year_now -= 1
course_term = "秋季学期"
elsif month_now < 9
- course_term = "秋季学期"
+ course_term = "春季学期"
end
%>
<%# (month_now >= 3 && month_now < 9) ? course_term = "春季学期" : course_term = "秋季学期" %>
- <% @school_id.nil? ? cur_school_course = [] : cur_school_course = find_miracle_course(10,7,@school_id, year_now, course_term) %>
+ <% cur_school_course = @school_id.nil? ? [] : find_miracle_course(10,7,@school_id, year_now, course_term) %>
<% if cur_school_course.count == 0 %>
@@ -108,12 +108,13 @@
- <% if User.current.logged? %>
+ <%# if User.current.logged? %>
-
<%= render :partial => 'no_course_title', :locals => {:course_title => l(:label_school_no_course)} %>
- <% end %>
- <% User.current.logged? ? course_count = 9 : course_count = 10 %>
+ <%# end %>
+ <%# User.current.logged? ? course_count = 9 : course_count = 10 %>
+ <% course_count = 9 %>
<% all_new_hot_course = find_all_new_hot_course(course_count, @school_id, year_now, course_term)%>
<% while all_new_hot_course.count < course_count%>
<% if course_term == "春季学期"
diff --git a/config/application.rb b/config/application.rb
index 000250a61..3b36d7cb7 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -17,9 +17,7 @@ module RedmineApp
# -- all .rb files in that directory are automatically loaded.
#verifier if email is real
- # EmailVerifier.config do |config|
- # config.verifier_email = "lizanle521@126.com"
- # end
+
config.generators do |g|
g.test_framework :rspec,
diff --git a/config/database.yml.example b/config/database.yml.example
deleted file mode 100644
index 2ff6231d4..000000000
--- a/config/database.yml.example
+++ /dev/null
@@ -1,52 +0,0 @@
-# Default setup is given for MySQL with ruby1.9. If you're running Redmine
-# with MySQL and ruby1.8, replace the adapter name with `mysql`.
-# Examples for PostgreSQL, SQLite3 and SQL Server can be found at the end.
-# Line indentation must be 2 spaces (no tabs).
-
-production:
- adapter: mysql2
- database: redmine
- host: localhost
- username: root
- password: ""
- encoding: utf8
-
-development:
- adapter: mysql2
- database: redmine_development
- host: 10.107.17.20
- username: root
- password: "1234"
- encoding: utf8
-
-# Warning: The database defined as "test" will be erased and
-# re-generated from your development database when you run "rake".
-# Do not set this db to the same as development or production.
-test:
- adapter: mysql2
- database: redmine_test
- host: 10.107.17.20
- username: root
- password: "1234"
- encoding: utf8
-
-# PostgreSQL configuration example
-#production:
-# adapter: postgresql
-# database: redmine
-# host: localhost
-# username: postgres
-# password: "postgres"
-
-# SQLite3 configuration example
-#production:
-# adapter: sqlite3
-# database: db/redmine.sqlite3
-
-# SQL Server configuration example
-#production:
-# adapter: sqlserver
-# database: redmine
-# host: localhost
-# username: jenkins
-# password: jenkins
diff --git a/config/initializers/send_mail.rb b/config/initializers/send_mail.rb
new file mode 100644
index 000000000..86b3a53fd
--- /dev/null
+++ b/config/initializers/send_mail.rb
@@ -0,0 +1,27 @@
+#!/usr/bin/env ruby
+
+require 'rubygems'
+require 'rufus-scheduler'
+
+#users = User.where("mail_notification = 'week' or mail_notification = 'day'")
+
+scheduler = Rufus::Scheduler.new
+# Every Monday at 00:00: send a weekly activity digest to users who opted for weekly mail
+scheduler.cron('0 0 * * 1') do
+ users = User.where("mail_notification = 'week'")
+ users.each do |user|
+ #Rails.logger.info "send mail to #{user.show_name}(#{user.mail}) at #{Time.now}"
+ Thread.start do
+ Mailer.send_for_user_activities(user, Date.today, 7).deliver
+ end
+ end
+end
+# Every day at 00:00: send a daily activity digest to users who opted for daily mail
+scheduler.cron('0 0 * * *') do
+ users = User.where("mail_notification = 'day'")
+ users.each do |user|
+ #Rails.logger.info "send mail to #{user.show_name}(#{user.mail}) at #{Time.now}"
+ Thread.start do
+ Mailer.send_for_user_activities(user, Date.today, 1).deliver
+ end
+ end
+end
+
diff --git a/config/initializers/session_store.rb b/config/initializers/session_store.rb
new file mode 100644
index 000000000..603b22a9f
--- /dev/null
+++ b/config/initializers/session_store.rb
@@ -0,0 +1 @@
+Rails.application.config.session_store ActionDispatch::Session::CacheStore, :expire_after => 20.minutes
\ No newline at end of file
diff --git a/config/locales/en.yml b/config/locales/en.yml
index 0b2bb94ee..857259392 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -434,8 +434,8 @@ en:
label_document_new: New document
label_document_plural: Documents
label_document_added: Document added
- label_forum_message_added: Message added
- label_forum_add: Forum added
+ label_forum_message_added: Forum message added
+ #label_forum_add: Forum added
label_document_public_info: "If you don't choose public, only the project's members can see the document."
label_role: Role
label_role_plural: Roles
diff --git a/config/locales/zh.yml b/config/locales/zh.yml
index 0f5e3f0f4..aa153cda6 100644
--- a/config/locales/zh.yml
+++ b/config/locales/zh.yml
@@ -248,6 +248,9 @@ zh:
# end
field_name: 名称
field_enterprise_name: 组织名称
+
+ label_week_mail: 一周动态
+ label_day_mail: 一日动态
#added by huang
field_tea_name: 教师
field_couurse_time: 学时
@@ -497,6 +500,9 @@ zh:
permission_paret_in_homework: 加入作业
permission_view_homework_attaches: 查看作业附件
permission_view_course_journals_for_messages: 查看课程留言
+ view_course_journals_for_messages: 课程留言
+ label_send_course_journals_for_messages: 发布了留言
+ label_send_course_messages: 发布了讨论
permission_select_course_modules: 选择课程模块
permission_view_course_files: 查看课程资源
permission_add_course: 新建课程
@@ -511,6 +517,7 @@ zh:
permission_upload_attachments: 资源上传
project_module_issue_tracking: 问题跟踪
+ project_moule_boards_show: 项目论坛
project_module_time_tracking: 时间跟踪
project_module_news: 新闻
project_module_documents: 文档
@@ -566,7 +573,7 @@ zh:
label_document_new: 新建文档
label_document_plural: 文档
label_document_added: 文档已添加
- label_forum_message_added: 发帖成功
+ label_forum_message_added: 贴吧发帖成功
label_forum_add: 贴吧创建成功
label_message_reply: 回帖人
label_document_public_info: (打钩为公开,不打钩则不公开,若不公开,仅项目成员可见该文档。)
@@ -657,6 +664,10 @@ zh:
label_user_login_attending_contest: 您还没有登录,请登录后参赛
label_user_login_score_and_comment: 您还没有登录,请登录后对作品进行打分评价
label_user_login_notificationcomment: 您还没有登录,请登录后参加评论
+ label_user_forum: 您的贴吧
+ label_user_message_forum: 您的帖子
+ label_user_message: 您的留言
+ label_show_your_message: 给您的留言
#end
#by huang # modified by bai
label_college: 高校进入
@@ -727,6 +738,7 @@ zh:
label_attachment: 文件
label_attachment_new: 新建文件
label_file_upload: 上传资料
+ label_course_file_upload: 上传了课件
label_attachment_delete: 删除文件
label_attachment_plural: 文件
label_file_added: 文件已添加
@@ -745,6 +757,9 @@ zh:
label_settings: 配置
label_overview: 近期动态
label_course_overview: "课程动态"
+ label_project_overview_new: "项目动态"
+ label_forums_overview: "贴吧动态"
+ label_homework_overview: 作业动态
label_question_student: 作业交流 #bai
label_homework_commit: 提交作业 #huang
label_homework_info: 提交情况 #huang
@@ -956,7 +971,9 @@ zh:
label_theme: 主题
label_default: 默认
label_search_titles_only: 仅在标题中搜索
- label_user_mail_option_all: "收取我的项目的所有通知"
+ label_user_mail_option_all: "收取我的所有通知"
+ label_user_mail_option_week: "按周收取我的所有通知"
+ label_user_mail_option_day: "按天收取我的所有通知"
label_must_answer: "必答"
label_poll_title: 问卷调查_问卷页面
#huang
@@ -992,6 +1009,7 @@ zh:
label_project_newother: "查看其他评论"
label_project_newshare: "分享了"
label_project_notice: "发布了通知:"
+ label_project_issue: "发布了问题:"
label_project_newadd: "添加了"
label_project_unadd: "暂无项目,赶快去创建吧!"
label_project_un: "该用户暂未参与任何项目!"
@@ -1402,9 +1420,9 @@ zh:
label_contribute_to: 参与了 %{project_count} 个项目:
#modify by men
label_x_contribute_to:
- zero: 参与了 %{count} 个项目:
- one: 参与了 %{count} 个项目:
- other: 参与了 %{count} 个项目:
+ zero: 参与了 %{count} 个项目
+ one: 参与了 %{count} 个项目
+ other: 参与了 %{count} 个项目
#end
label_total_commit: 共%{total_commit}次提交
label_question_number: 第%{question_number}题:
@@ -1591,6 +1609,7 @@ zh:
label_exist_repository_path: 定义已有版本库URL路径,定义格式file://, http://, https://, svn://
label_project_no_activity: 该项目暂无动态!
label_course_homework_un: 暂未发布任何作业
+ label_course_homework: 发布了作业
label_follow_no_requirement: 暂未关注任何需求!
label_no_user_respond_you: 暂无任何用户对您进行反馈!
label_tags_issue: 问题名称:
@@ -1662,6 +1681,7 @@ zh:
label_project_no_follow: 该项目暂未被关注!
label_no_bid_project: 暂无参与项目
label_no_course_project: 暂无已提交的作业!
+ label_course_submit_homework: 提交了作业
label_bids_reward_method: 奖励方式 :
label_bids_reward_what: 输入奖励内容
label_call_bonus: 奖金
@@ -1769,6 +1789,7 @@ zh:
label_wiki_number: wiki的数量
label_message_number: 留言的数量
label_activity_number: 个人动态数量
+ label_activities: 个人动态
label_issue_message_number: 对issue的留言数量
label_code_submit_number: 代码提交次数
label_topic_number: 讨论区发言数量
@@ -1788,9 +1809,9 @@ zh:
label_course_contribute_to: 参与了 %{project_count} 个项目:
label_x_course_contribute_to:
- zero: "参与了 %{count} 个课程:"
- one: "参与了 %{count} 个课程:"
- other: "参与了 %{count} 个课程:"
+ zero: "参与了 %{count} 个课程"
+ one: "参与了 %{count} 个课程"
+ other: "参与了 %{count} 个课程"
label_join_contest: 加入竞赛
label_exit_contest: 退出竞赛
@@ -1896,6 +1917,7 @@ zh:
label_bids_task_list: 作业列表
label_join_course: 加入
+ label_invite_project: 邀请您加入项目
label_exit_course: 退出
label_exit_group: 退出当前分班
label_new_join: 加入
@@ -1954,6 +1976,7 @@ zh:
label_hot_project: '热门项目'
label_borad_project: 项目讨论区
label_borad_course: 课程讨论区
+ view_borad_course: 课程讨论
label_memo_create_succ: 发布成功
label_memo_create_fail: 发布失败
label_forum_create_succ: 贴吧新建成功
@@ -2117,6 +2140,7 @@ zh:
label_attendingcontestwork_release_person: 发布人员
label_attendingcontestwork_adaptive_system: 系统支持
label_attendingcontestwork_download: 作品下载
+ label_course_attendingcontestwork_download: 课件下载
label_attendingcontestwork_developers: 开发人员
label_attendingcontestwork_average_scores: 平均评分
label_attendingcontestwork_release_time: 发布时间
@@ -2376,7 +2400,7 @@ zh:
mail_issue_from_project: "项目问题跟踪"
mail_issue_attachments: "附件:"
mail_issue_reply: "我要回复"
-
+ mail_footer: "退订Trustie社区任务提醒?"
# 课程资源上传
# edit by meng
# 课程资源上传>
diff --git a/lib/dalli-2.7.2/.yardoc/checksums b/lib/dalli-2.7.2/.yardoc/checksums
new file mode 100644
index 000000000..10e718cb2
--- /dev/null
+++ b/lib/dalli-2.7.2/.yardoc/checksums
@@ -0,0 +1,13 @@
+lib/dalli.rb b1fd9d39df06608fcae5bcf46e9940f95b186d22
+lib/dalli/ring.rb e2cd42d8b963e669e2c8a83791fa56ec94f9ec55
+lib/dalli/server.rb a42d734f9b3d654886c86f04fb4f0352e7147b1e
+lib/dalli/client.rb 11afa0d702c68a151c87ea6e7ccdc863cf03884f
+lib/dalli/socket.rb 18b7243332ec2dafa9a17c195944e321b684e67e
+lib/dalli/options.rb d7ecb4c52b4ae2b222f319813234297e0951f82a
+lib/dalli/version.rb fff3231b7f52d7fa1dabb78bf1f67dcef95c5378
+lib/dalli/railtie.rb 63dc0fe85790a10225e867774f2c611d1c1ac46c
+lib/dalli/compressor.rb 13b0cf3f607bd8bc9f969679b0b6e9dcb0a059d7
+lib/dalli/cas/client.rb 983ded7ec738ed4502658150123e9c5ad7e3faa1
+lib/rack/session/dalli.rb 2696ad72e8f9d7f5ceb232db0c8d9a8916192edb
+lib/active_support/cache/dalli_store.rb 010d880e0f297d92b26c8f44e446add9d4fedfa2
+lib/action_dispatch/middleware/session/dalli_store.rb 62236273ea28a91502871f31aa600e038358931a
diff --git a/lib/dalli-2.7.2/.yardoc/object_types b/lib/dalli-2.7.2/.yardoc/object_types
new file mode 100644
index 000000000..81127d629
Binary files /dev/null and b/lib/dalli-2.7.2/.yardoc/object_types differ
diff --git a/lib/dalli-2.7.2/.yardoc/objects/root.dat b/lib/dalli-2.7.2/.yardoc/objects/root.dat
new file mode 100644
index 000000000..9e3295952
Binary files /dev/null and b/lib/dalli-2.7.2/.yardoc/objects/root.dat differ
diff --git a/lib/dalli-2.7.2/.yardoc/proxy_types b/lib/dalli-2.7.2/.yardoc/proxy_types
new file mode 100644
index 000000000..beefda1ae
Binary files /dev/null and b/lib/dalli-2.7.2/.yardoc/proxy_types differ
diff --git a/lib/dalli-2.7.2/Gemfile b/lib/dalli-2.7.2/Gemfile
new file mode 100644
index 000000000..aeb2bb358
--- /dev/null
+++ b/lib/dalli-2.7.2/Gemfile
@@ -0,0 +1,12 @@
+source 'https://rubygems.org'
+
+gemspec
+
+gem 'rake'
+gem 'kgio', :platform => :mri
+gem 'appraisal'
+gem 'connection_pool'
+
+group :test do
+ gem 'simplecov'
+end
diff --git a/lib/dalli-2.7.2/History.md b/lib/dalli-2.7.2/History.md
new file mode 100644
index 000000000..3ba705502
--- /dev/null
+++ b/lib/dalli-2.7.2/History.md
@@ -0,0 +1,412 @@
+Dalli Changelog
+=====================
+
+2.7.2
+==========
+
+- The fix for #423 didn't make it into the released 2.7.1 gem somehow.
+
+2.7.1
+==========
+
+- Rack session will check if servers are up on initialization (arthurnn, #423)
+- Add support for IPv6 addresses in hex form, ie: "[::1]:11211" (dplummer, #428)
+- Add symbol support for namespace (jingkai #431)
+- Support expiration intervals longer than 30 days (leonid-shevtsov #436)
+
+2.7.0
+==========
+
+- BREAKING CHANGE:
+ Dalli::Client#add and #replace now return a truthy value, not boolean true or false.
+- Multithreading support with dalli\_store:
+ Use :pool\_size to create a pool of shared, threadsafe Dalli clients in Rails:
+```ruby
+ config.cache_store = :dalli_store, "cache-1.example.com", "cache-2.example.com", :compress => true, :pool_size => 5, :expires_in => 300
+```
+ This will ensure the Rails.cache singleton does not become a source of contention.
+ **PLEASE NOTE** Rails's :mem\_cache\_store does not support pooling as of
+Rails 4.0. You must use :dalli\_store.
+
+- Implement `version` for retrieving version of connected servers [dterei, #384]
+- Implement `fetch_multi` for batched read/write [sorentwo, #380]
+- Add more support for safe updates with multiple writers: [philipmw, #395]
+ `require 'dalli/cas/client'` augments Dalli::Client with the following methods:
+ * Get value with CAS: `[value, cas] = get_cas(key)`
+ `get_cas(key) {|value, cas| ...}`
+ * Get multiple values with CAS: `get_multi_cas(k1, k2, ...) {|value, metadata| cas = metadata[:cas]}`
+ * Set value with CAS: `new_cas = set_cas(key, value, cas, ttl, options)`
+ * Replace value with CAS: `replace_cas(key, new_value, cas, ttl, options)`
+ * Delete value with CAS: `delete_cas(key, cas)`
+- Fix bug with get key with "Not found" value [uzzz, #375]
+
+2.6.4
+=======
+
+- Fix ADD command, aka `write(unless_exist: true)` (pitr, #365)
+- Upgrade test suite from mini\_shoulda to minitest.
+- Even more performance improvements for get\_multi (xaop, #331)
+
+2.6.3
+=======
+
+- Support specific stats by passing `:items` or `:slabs` to `stats` method [bukhamseen]
+- Fix 'can't modify frozen String' errors in `ActiveSupport::Cache::DalliStore` [dblock]
+- Protect against objects with custom equality checking [theron17]
+- Warn if value for key is too large to store [locriani]
+
+2.6.2
+=======
+
+- Properly handle missing RubyInline
+
+2.6.1
+=======
+
+- Add optional native C binary search for ring, add:
+
+gem 'RubyInline'
+
+ to your Gemfile to get a 10% speedup when using many servers.
+ You will see no improvement if you are only using one server.
+
+- More get_multi performance optimization [xaop, #315]
+- Add lambda support for cache namespaces [joshwlewis, #311]
+
+2.6.0
+=======
+
+- read_multi optimization, now checks local_cache [chendo, #306]
+- Re-implement get_multi to be non-blocking [tmm1, #295]
+- Add `dalli` accessor to dalli_store to access the underlying
+Dalli::Client, for things like `get_multi`.
+- Add `Dalli::GzipCompressor`, primarily for compatibility with nginx's HttpMemcachedModule using `memcached_gzip_flag`
+
+2.5.0
+=======
+
+- Don't escape non-ASCII keys, memcached binary protocol doesn't care. [#257]
+- :dalli_store now implements LocalCache [#236]
+- Removed lots of old session_store test code, tests now all run without a default memcached server [#275]
+- Changed Dalli ActiveSupport adapter to always attempt instrumentation [brianmario, #284]
+- Change write operations (add/set/replace) to return false when value is too large to store [brianmario, #283]
+- Allowing different compressors per client [naseem]
+
+2.4.0
+=======
+- Added the ability to swap out the compressor used to [de]compress cache data [brianmario, #276]
+- Fix get\_multi performance issues with lots of memcached servers [tmm1]
+- Throw more specific exceptions [tmm1]
+- Allowing different types of serialization per client [naseem]
+
+2.3.0
+=======
+- Added the ability to swap out the serializer used to [de]serialize cache data [brianmario, #274]
+
+2.2.1
+=======
+
+- Fix issues with ENV-based connections. [#266]
+- Fix problem with SessionStore in Rails 4.0 [#265]
+
+2.2.0
+=======
+
+- Add Rack session with\_lock helper, for Rails 4.0 support [#264]
+- Accept connection string in the form of a URL (e.g., memcached://user:pass@hostname:port) [glenngillen]
+- Add touch operation [#228, uzzz]
+
+2.1.0
+=======
+
+- Add Railtie to auto-configure Dalli when included in Gemfile [#217, steveklabnik]
+
+2.0.5
+=======
+
+- Create proper keys for arrays of objects passed as keys [twinturbo, #211]
+- Handle long key with namespace [#212]
+- Add NODELAY to TCP socket options [#206]
+
+2.0.4
+=======
+
+- Dalli no longer needs to be reset after Unicorn/Passenger fork [#208]
+- Add option to re-raise errors rescued in the session and cache stores. [pitr, #200]
+- DalliStore#fetch called the block if the cached value == false [#205]
+- DalliStore should have accessible options [#195]
+- Add silence and mute support for DalliStore [#207]
+- Tracked down and fixed socket corruption due to Timeout [#146]
+
+2.0.3
+=======
+
+- Allow proper retrieval of stored `false` values [laserlemon, #197]
+- Allow non-ascii and whitespace keys, only the text protocol has those restrictions [#145]
+- Fix DalliStore#delete error-handling [#196]
+
+2.0.2
+=======
+
+- Fix all dalli\_store operations to handle nil options [#190]
+- Increment and decrement with :initial => nil now return nil (lawrencepit, #112)
+
+2.0.1
+=======
+
+- Fix nil option handling in dalli\_store#write [#188]
+
+2.0.0
+=======
+
+- Reimplemented the Rails' dalli\_store to remove use of
+ ActiveSupport::Cache::Entry which added 109 bytes overhead to every
+ value stored, was a performance bottleneck and duplicated a lot of
+ functionality already in Dalli. One benchmark went from 4.0 sec to 3.0
+ sec with the new dalli\_store. [#173]
+- Added reset\_stats operation [#155]
+- Added support for configuring keepalive on TCP connections to memcached servers (@bianster, #180)
+
+Notes:
+
+ * data stored with dalli\_store 2.x is NOT backwards compatible with 1.x.
+ Upgraders are advised to namespace their keys and roll out the 2.x
+ upgrade slowly so keys do not clash and caches are warmed.
+ `config.cache_store = :dalli_store, :expires_in => 24.hours.to_i, :namespace => 'myapp2'`
+ * data stored with plain Dalli::Client API is unchanged.
+ * removed support for dalli\_store's race\_condition\_ttl option.
+ * removed support for em-synchrony and unix socket connection options.
+ * removed support for Ruby 1.8.6
+ * removed memcache-client compatibility layer and upgrade documentation.
+
+
+1.1.5
+=======
+
+- Coerce input to incr/decr to integer via #to\_i [#165]
+- Convert test suite to minitest/spec (crigor, #166)
+- Fix encoding issue with keys [#162]
+- Fix double namespacing with Rails and dalli\_store. [#160]
+
+1.1.4
+=======
+
+- Use 127.0.0.1 instead of localhost as default to avoid IPv6 issues
+- Extend DalliStore's :expires\_in when :race\_condition\_ttl is also used.
+- Fix :expires\_in option not propagating from DalliStore to Client, GH-136
+- Added support for native Rack session store. Until now, Dalli's
+ session store has required Rails. Now you can use Dalli to store
+ sessions for any Rack application.
+
+ require 'rack/session/dalli'
+ use Rack::Session::Dalli, :memcache_server => 'localhost:11211', :compression => true
+
+1.1.3
+=======
+
+- Support Rails's autoloading hack for loading sessions with objects
+  whose classes have not been required yet, GH-129
+- Support Unix sockets for connectivity. Shows a 2x performance
+ increase but keep in mind they only work on localhost. (dfens)
+
+1.1.2
+=======
+
+- Fix incompatibility with latest Rack session API when destroying
+ sessions, thanks @twinge!
+
+1.1.1
+=======
+
+v1.1.0 was a bad release. Yanked.
+
+1.1.0
+=======
+
+- Remove support for Rails 2.3, add support for Rails 3.1
+- Fix socket failure retry logic, now you can restart memcached and Dalli won't complain!
+- Add support for fibered operation via em-synchrony (eliaslevy)
+- Gracefully handle write timeouts, GH-99
+- Only issue bug warning for unexpected StandardErrors, GH-102
+- Add travis-ci build support (ryanlecompte)
+- Gracefully handle errors in get_multi (michaelfairley)
+- Misc fixes from crash2burn, fphilipe, igreg, raggi
+
+1.0.5
+=======
+
+- Fix socket failure retry logic, now you can restart memcached and Dalli won't complain!
+
+1.0.4
+=======
+
+- Handle non-ASCII key content in dalli_store
+- Accept key array for read_multi in dalli_store
+- Fix multithreaded race condition in creation of mutex
+
+1.0.3
+=======
+
+- Better handling of application marshalling errors
+- Work around jruby IO#sysread compatibility issue
+
+
+1.0.2
+=======
+
+ - Allow browser session cookies (blindsey)
+ - Compatibility fixes (mwynholds)
+ - Add backwards compatibility module for memcache-client, require 'dalli/memcache-client'. It makes
+ Dalli more compatible with memcache-client and prints out a warning any time you do something that
+ is no longer supported so you can fix your code.
+
+1.0.1
+=======
+
+ - Explicitly handle application marshalling bugs, GH-56
+ - Add support for username/password as options, to allow multiple bucket access
+ from the same Ruby process, GH-52
+ - Add support for >1MB values with :value_max_bytes option, GH-54 (r-stu31)
+ - Add support for default TTL, :expires_in, in Rails 2.3. (Steven Novotny)
+ config.cache_store = :dalli_store, 'localhost:11211', {:expires_in => 4.hours}
+
+
+1.0.0
+=======
+
+Welcome gucki as a Dalli committer!
+
+ - Fix network and namespace issues in get_multi (gucki)
+ - Better handling of unmarshalling errors (mperham)
+
+0.11.2
+=======
+
+ - Major reworking of socket error and failover handling (gucki)
+ - Add basic JRuby support (mperham)
+
+0.11.1
+======
+
+ - Minor fixes, doc updates.
+ - Add optional support for kgio sockets, gives a 10-15% performance boost.
+
+0.11.0
+======
+
+Warning: this release changes how Dalli marshals data. I do not guarantee compatibility until 1.0 but I will increment the minor version every time a release breaks compatibility until 1.0.
+
+IT IS HIGHLY RECOMMENDED YOU FLUSH YOUR CACHE BEFORE UPGRADING.
+
+ - multi() now works reentrantly.
+ - Added new Dalli::Client option for default TTLs, :expires_in, defaults to 0 (aka forever).
+ - Added new Dalli::Client option, :compression, to enable auto-compression of values.
+ - Refactor how Dalli stores data on the server. Values are now tagged
+ as "marshalled" or "compressed" so they can be automatically deserialized
+ without the client having to know how they were stored.
+
+0.10.1
+======
+
+ - Prefer server config from environment, fixes Heroku session store issues (thanks JoshMcKin)
+ - Better handling of non-ASCII values (size -> bytesize)
+ - Assert that keys are ASCII only
+
+0.10.0
+======
+
+Warning: this release changed how Rails marshals data with Dalli. Unfortunately previous versions double marshalled values. It is possible that data stored with previous versions of Dalli will not work with this version.
+
+IT IS HIGHLY RECOMMENDED YOU FLUSH YOUR CACHE BEFORE UPGRADING.
+
+ - Rework how the Rails cache store does value marshalling.
+ - Rework old server version detection to avoid a socket read hang.
+ - Refactor the Rails 2.3 :dalli\_store to be closer to :mem\_cache\_store.
+ - Better documentation for session store config (plukevdh)
+
+0.9.10
+----
+
+ - Better server retry logic (next2you)
+ - Rails 3.1 compatibility (gucki)
+
+
+0.9.9
+----
+
+ - Add support for *_multi operations for add, set, replace and delete. This implements
+ pipelined network operations; Dalli disables network replies so we're not limited by
+ latency, allowing for much higher throughput.
+
+ dc = Dalli::Client.new
+ dc.multi do
+ dc.set 'a', 1
+ dc.set 'b', 2
+ dc.set 'c', 3
+ dc.delete 'd'
+ end
+ - Minor fix to set the continuum sorted by value (kangster)
+ - Implement session store with Rails 2.3. Update docs.
+
+0.9.8
+-----
+
+ - Implement namespace support
+ - Misc fixes
+
+
+0.9.7
+-----
+
+ - Small fix for NewRelic integration.
+ - Detect and fail on older memcached servers (pre-1.4).
+
+0.9.6
+-----
+
+ - Patches for Rails 3.0.1 integration.
+
+0.9.5
+-----
+
+ - Major design change - raw support is back to maximize compatibility with Rails
+ and the increment/decrement operations. You can now pass :raw => true to most methods
+ to bypass (un)marshalling.
+ - Support symbols as keys (ddollar)
+ - Rails 2.3 bug fixes
+
+
+0.9.4
+-----
+
+ - Dalli support now in rack-bug (http://github.com/brynary/rack-bug), give it a try!
+ - Namespace support for Rails 2.3 (bpardee)
+ - Bug fixes
+
+
+0.9.3
+-----
+
+ - Rails 2.3 support (beanieboi)
+ - Rails SessionStore support
+ - Passenger integration
+ - memcache-client upgrade docs, see Upgrade.md
+
+
+0.9.2
+----
+
+ - Verify proper operation in Heroku.
+
+
+0.9.1
+----
+
+ - Add fetch and cas operations (mperham)
+ - Add incr and decr operations (mperham)
+ - Initial support for SASL authentication via the MEMCACHE_{USERNAME,PASSWORD} environment variables, needed for Heroku (mperham)
+
+0.9.0
+-----
+
+ - Initial gem release.
diff --git a/lib/dalli-2.7.2/LICENSE b/lib/dalli-2.7.2/LICENSE
new file mode 100644
index 000000000..c9f5cacab
--- /dev/null
+++ b/lib/dalli-2.7.2/LICENSE
@@ -0,0 +1,20 @@
+Copyright (c) Mike Perham
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/lib/dalli-2.7.2/Performance.md b/lib/dalli-2.7.2/Performance.md
new file mode 100644
index 000000000..1079be259
--- /dev/null
+++ b/lib/dalli-2.7.2/Performance.md
@@ -0,0 +1,42 @@
+Performance
+====================
+
+Caching is all about performance, so I carefully track Dalli performance to ensure no regressions.
+You can optionally use kgio to give Dalli a 10-20% performance boost: `gem install kgio`.
+
+Note I've added some benchmarks over time to Dalli that the other libraries don't necessarily have.
+
+memcache-client
+---------------
+
+Testing 1.8.5 with ruby 1.9.3p0 (2011-10-30 revision 33570) [x86_64-darwin11.2.0]
+
+ user system total real
+ set:plain:memcache-client 1.860000 0.310000 2.170000 ( 2.188030)
+ set:ruby:memcache-client 1.830000 0.290000 2.120000 ( 2.130212)
+ get:plain:memcache-client 1.830000 0.340000 2.170000 ( 2.176156)
+ get:ruby:memcache-client 1.900000 0.330000 2.230000 ( 2.235045)
+ multiget:ruby:memcache-client 0.860000 0.120000 0.980000 ( 0.987348)
+ missing:ruby:memcache-client 1.630000 0.320000 1.950000 ( 1.954867)
+ mixed:ruby:memcache-client 3.690000 0.670000 4.360000 ( 4.364469)
+
+
+dalli
+-----
+
+Testing with Rails 3.2.1
+Using kgio socket IO
+Testing 2.0.0 with ruby 1.9.3p125 (2012-02-16 revision 34643) [x86_64-darwin11.3.0]
+
+ user system total real
+ mixed:rails:dalli 1.580000 0.570000 2.150000 ( 3.008839)
+ set:plain:dalli 0.730000 0.300000 1.030000 ( 1.567098)
+ setq:plain:dalli 0.520000 0.120000 0.640000 ( 0.634402)
+ set:ruby:dalli 0.800000 0.300000 1.100000 ( 1.640348)
+ get:plain:dalli 0.840000 0.330000 1.170000 ( 1.668425)
+ get:ruby:dalli 0.850000 0.330000 1.180000 ( 1.665716)
+ multiget:ruby:dalli 0.700000 0.260000 0.960000 ( 0.965423)
+ missing:ruby:dalli 0.720000 0.320000 1.040000 ( 1.511720)
+ mixed:ruby:dalli 1.660000 0.640000 2.300000 ( 3.320743)
+ mixedq:ruby:dalli 1.630000 0.510000 2.140000 ( 2.629734)
+ incr:ruby:dalli 0.270000 0.100000 0.370000 ( 0.547618)
diff --git a/lib/dalli-2.7.2/README.md b/lib/dalli-2.7.2/README.md
new file mode 100644
index 000000000..d53093c4d
--- /dev/null
+++ b/lib/dalli-2.7.2/README.md
@@ -0,0 +1,224 @@
+Dalli [![Build Status](https://secure.travis-ci.org/mperham/dalli.png)](http://travis-ci.org/mperham/dalli) [![Dependency Status](https://gemnasium.com/mperham/dalli.png)](https://gemnasium.com/mperham/dalli) [![Code Climate](https://codeclimate.com/github/mperham/dalli.png)](https://codeclimate.com/github/mperham/dalli)
+=====
+
+Dalli is a high performance pure Ruby client for accessing memcached servers. It works with memcached 1.4+ only as it uses the newer binary protocol. It should be considered a replacement for the memcache-client gem.
+
+The name is a variant of Salvador Dali for his famous painting [The Persistence of Memory](http://en.wikipedia.org/wiki/The_Persistence_of_Memory).
+
+![Persistence of Memory](http://www.virtualdali.com/assets/paintings/31PersistenceOfMemory.jpg)
+
+Dalli's initial development was sponsored by [CouchBase](http://www.couchbase.com/). Many thanks to them!
+
+
+Design
+------------
+
+I decided to write Dalli after maintaining memcache-client for two years for a few specific reasons:
+
+ 0. The code is mostly old and gross. The bulk of the code is a single 1000 line .rb file.
+ 1. It has a lot of options that are infrequently used which complicate the codebase.
+ 2. The implementation has no single point to attach monitoring hooks.
+ 3. Uses the old text protocol, which hurts raw performance.
+
+So a few notes. Dalli:
+
+ 0. uses the exact same algorithm to choose a server so existing memcached clusters with TBs of data will work identically to memcache-client.
+ 1. is approximately 20% faster than memcache-client (which itself was heavily optimized) in Ruby 1.9.2.
+ 2. contains explicit "chokepoint" methods which handle all requests; these can be hooked into by monitoring tools (NewRelic, Rack::Bug, etc) to track memcached usage.
+ 3. supports SASL for use in managed environments, e.g. Heroku.
+ 4. provides proper failover with recovery and adjustable timeouts
+
+
+Supported Ruby versions and implementations
+------------------------------------------------
+
+Dalli should work identically on:
+
+ * JRuby 1.6+
+ * Ruby 1.9.3+
+ * Rubinius 2.0
+
+If you have problems, please enter an issue.
+
+
+Installation and Usage
+------------------------
+
+Remember, Dalli **requires** memcached 1.4+. You can check the version with `memcached -h`. Please note that the memcached that Mac OS X Snow Leopard ships with is 1.2.8 and won't work. Install 1.4.x using Homebrew with
+
+ brew install memcached
+
+On Ubuntu you can install it by running:
+
+ apt-get install memcached
+
+You can verify your installation using this piece of code:
+
+```bash
+gem install dalli
+```
+
+```ruby
+require 'dalli'
+options = { :namespace => "app_v1", :compress => true }
+dc = Dalli::Client.new('localhost:11211', options)
+dc.set('abc', 123)
+value = dc.get('abc')
+```
+
+The test suite requires memcached 1.4.3+ with SASL enabled (brew install memcached --enable-sasl ; mv /usr/bin/memcached /usr/bin/memcached.old). Currently only supports the PLAIN mechanism.
+
+Dalli has no runtime dependencies and never will. You can optionally install the 'kgio' gem to
+give Dalli a 20-30% performance boost.
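+
+If you want that boost in a Bundler-managed app, the Gemfile entry is a one-liner (a sketch mirroring the entry in Dalli's own Gemfile):
+
+```ruby
+gem 'kgio', :platform => :mri  # optional kgio-backed socket IO, MRI only
+```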
+
+
+Usage with Rails 3.x and 4.x
+---------------------------
+
+In your Gemfile:
+
+```ruby
+gem 'dalli'
+```
+
+In `config/environments/production.rb`:
+
+```ruby
+config.cache_store = :dalli_store
+```
+
+Here's a more comprehensive example that sets a reasonable default for maximum cache entry lifetime (one day), enables compression for large values and namespaces all entries for this rails app. Remove the namespace if you have multiple apps which share cached values.
+
+```ruby
+config.cache_store = :dalli_store, 'cache-1.example.com', 'cache-2.example.com',
+ { :namespace => NAME_OF_RAILS_APP, :expires_in => 1.day, :compress => true }
+```
+
+If your servers are specified in `ENV["MEMCACHE_SERVERS"]` (e.g. on Heroku when using a third-party hosted addon), simply provide `nil` for the servers:
+
+```ruby
+config.cache_store = :dalli_store, nil, { :namespace => NAME_OF_RAILS_APP, :expires_in => 1.day, :compress => true }
+```
+
+To use Dalli for Rails session storage that times out after 20 minutes, in `config/initializers/session_store.rb`:
+
+For Rails >= 3.2.4:
+
+```ruby
+Rails.application.config.session_store ActionDispatch::Session::CacheStore, :expire_after => 20.minutes
+```
+
+For Rails 3.x:
+
+```ruby
+require 'action_dispatch/middleware/session/dalli_store'
+Rails.application.config.session_store :dalli_store, :memcache_server => ['host1', 'host2'], :namespace => 'sessions', :key => '_foundation_session', :expire_after => 20.minutes
+```
+
+Dalli does not support Rails 2.x.
+
+
+Multithreading and Rails
+--------------------------
+
+If you use Puma or another threaded app server, as of Dalli 2.7, you can use a pool
+of Dalli clients with Rails to ensure the `Rails.cache` singleton does not become a
+source of thread contention. You must add `gem 'connection_pool'` to your Gemfile and
+add :pool\_size to your `dalli_store` config:
+
+```ruby
+config.cache_store = :dalli_store, 'cache-1.example.com', { :pool_size => 5 }
+```
+
+You can then use the Rails cache as normal or check out a Dalli client directly from the pool:
+
+```ruby
+Rails.cache.fetch('foo', :expires_in => 300) do
+ 'bar'
+end
+
+Rails.cache.dalli.with do |client|
+ # client is a Dalli::Client instance which you can
+ # use ONLY within this block
+end
+```
+
+
+Configuration
+------------------------
+
+Dalli::Client accepts the following options. All times are in seconds.
+
+**expires_in**: Global default for key TTL. Default is 0, which means no expiry.
+
+**failover**: Boolean, if true Dalli will failover to another server if the main server for a key is down.
+
+**compress**: Boolean, if true Dalli will gzip-compress values larger than 1K.
+
+**compression_min_size**: Minimum value byte size for which to attempt compression. Default is 1K.
+
+**compression_max_size**: Maximum value byte size for which to attempt compression. Default is unlimited.
+
+**serializer**: The serializer to use for objects being stored (ex. JSON).
+Default is Marshal.
+
+**socket_timeout**: Timeout for all socket operations (connect, read, write). Default is 0.5.
+
+**socket_max_failures**: When a socket operation fails after socket_timeout, the same operation is retried. This avoids immediately marking a server down when there is only a brief network problem. Default is 2.
+
+**socket_failure_delay**: Before retrying a socket operation, the process sleeps for this amount of time. Default is 0.01. Set to nil for no delay.
+
+**down_retry_delay**: When a server has been marked down due to many failures, the server will be checked again for being alive only after this amount of time. Don't set this value too low, otherwise each request that tries the failed server might hang for the maximum **socket_timeout**. Default is 1 second.
+
+**value_max_bytes**: The maximum size of a value in memcached. Defaults to 1MB, this can be increased with memcached's -I parameter. You must also configure Dalli to allow the larger size here.
+
+**username**: The username to use for authenticating this client instance against a SASL-enabled memcached server. Heroku users should not need to use this normally.
+
+**password**: The password to use for authenticating this client instance against a SASL-enabled memcached server. Heroku users should not need to use this normally.
+
+**keepalive**: Boolean. If true, Dalli will enable keep-alive for socket connections. Default is true.
+
+**compressor**: The compressor to use for objects being stored.
+Default is zlib, implemented under `Dalli::Compressor`.
+If serving compressed data using nginx's HttpMemcachedModule, set `memcached_gzip_flag 2` and use `Dalli::GzipCompressor`
+
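+For example, a client combining several of the options above might be configured like this (a minimal sketch; the values are only placeholders):
+
+```ruby
+require 'dalli'
+
+# Sketch: combine several of the documented options on one client.
+dc = Dalli::Client.new('localhost:11211',
+  :expires_in           => 300,              # default TTL of five minutes
+  :compress             => true,             # gzip values larger than compression_min_size
+  :compression_min_size => 2048,             # only try to compress values over 2K
+  :socket_timeout       => 0.25,             # fail fast on socket operations
+  :value_max_bytes      => 2 * 1024 * 1024)  # memcached must also be started with -I 2m
+
+dc.set('greeting', 'hello')
+dc.get('greeting')  # => "hello"
+```
+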
+Features and Changes
+------------------------
+
+By default, Dalli is thread-safe. Disable thread-safety at your own peril.
+
+Dalli does not need anything special in Unicorn/Passenger since 2.0.4.
+It will detect sockets shared with child processes and gracefully reopen the
+socket.
+
+Note that Dalli does not require ActiveSupport or Rails. You can safely use it in your own Ruby projects.
+
+[View the Client API](http://www.rubydoc.info/github/mperham/dalli/Dalli/Client)
+
+Helping Out
+-------------
+
+If you have a fix you wish to provide, please fork the code, fix in your local project and then send a pull request on github. Please ensure that you include a test which verifies your fix and update History.md with a one sentence description of your fix so you get credit as a contributor.
+
+We're not accepting new compressors. They are trivial to add in an initializer. See #385 (LZ4), #406 (Snappy)
+
+Thanks
+------------
+
+Eric Wong - for help using his [kgio](http://unicorn.bogomips.org/kgio/index.html) library.
+
+Brian Mitchell - for his remix-stash project which was helpful when implementing and testing the binary protocol support.
+
+[CouchBase](http://couchbase.com) - for their project sponsorship
+
+
+Author
+----------
+
+Mike Perham, [mikeperham.com](http://mikeperham.com), [@mperham](http://twitter.com/mperham)
+
+
+Copyright
+-----------
+
+Copyright (c) Mike Perham. See LICENSE for details.
diff --git a/lib/dalli-2.7.2/Rakefile b/lib/dalli-2.7.2/Rakefile
new file mode 100644
index 000000000..61752a9a8
--- /dev/null
+++ b/lib/dalli-2.7.2/Rakefile
@@ -0,0 +1,42 @@
+require 'appraisal'
+require 'rake/testtask'
+Rake::TestTask.new(:test) do |test|
+ test.libs << 'test'
+ test.pattern = 'test/**/test_*.rb'
+ test.warning = true
+ test.verbose = true
+end
+
+Rake::TestTask.new(:bench) do |test|
+ test.libs << 'test'
+ test.pattern = 'test/benchmark_test.rb'
+end
+
+begin
+ require 'metric_fu'
+ MetricFu::Configuration.run do |config|
+ config.rcov[:rcov_opts] << "-Itest:lib"
+ end
+rescue LoadError
+end
+
+task :default => :test
+
+task :test_all do
+ system('rake test RAILS_VERSION="~> 3.0.0"')
+ system('rake test RAILS_VERSION=">= 3.0.0"')
+end
+
+# 'gem install rdoc' to upgrade RDoc if this is giving you errors
+begin
+ require 'rdoc/task'
+ RDoc::Task.new do |rd|
+ rd.rdoc_files.include("lib/**/*.rb")
+ end
+rescue LoadError
+ puts "Unable to load rdoc, run 'gem install rdoc' to fix this."
+end
+
+require 'rake/clean'
+CLEAN.include "**/*.rbc"
+CLEAN.include "**/.DS_Store"
diff --git a/lib/dalli-2.7.2/dalli.gemspec b/lib/dalli-2.7.2/dalli.gemspec
new file mode 100644
index 000000000..3d4cf537f
--- /dev/null
+++ b/lib/dalli-2.7.2/dalli.gemspec
@@ -0,0 +1,29 @@
+require './lib/dalli/version'
+
+Gem::Specification.new do |s|
+ s.name = %q{dalli}
+ s.version = Dalli::VERSION
+ s.license = "MIT"
+
+ s.authors = ["Mike Perham"]
+ s.description = %q{High performance memcached client for Ruby}
+ s.email = %q{mperham@gmail.com}
+ s.files = Dir.glob("lib/**/*") + [
+ "LICENSE",
+ "README.md",
+ "History.md",
+ "Rakefile",
+ "Gemfile",
+ "dalli.gemspec",
+ "Performance.md",
+ ]
+ s.homepage = %q{http://github.com/mperham/dalli}
+ s.rdoc_options = ["--charset=UTF-8"]
+ s.require_paths = ["lib"]
+ s.summary = %q{High performance memcached client for Ruby}
+ s.test_files = Dir.glob("test/**/*")
+ s.add_development_dependency(%q, [">= 4.2.0"])
+ s.add_development_dependency(%q, [">= 0"])
+ s.add_development_dependency(%q, ["~> 4"])
+end
+
diff --git a/lib/dalli-2.7.2/lib/action_dispatch/middleware/session/dalli_store.rb b/lib/dalli-2.7.2/lib/action_dispatch/middleware/session/dalli_store.rb
new file mode 100644
index 000000000..abbbaa290
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/action_dispatch/middleware/session/dalli_store.rb
@@ -0,0 +1,81 @@
+require 'active_support/cache'
+require 'action_dispatch/middleware/session/abstract_store'
+require 'dalli'
+
+# Dalli-based session store for Rails 3.0.
+module ActionDispatch
+ module Session
+ class DalliStore < AbstractStore
+ def initialize(app, options = {})
+ # Support old :expires option
+ options[:expire_after] ||= options[:expires]
+
+ super
+
+ @default_options = { :namespace => 'rack:session' }.merge(@default_options)
+
+ @pool = options[:cache] || begin
+ Dalli::Client.new(
+ @default_options[:memcache_server], @default_options)
+ end
+ @namespace = @default_options[:namespace]
+
+ @raise_errors = !!@default_options[:raise_errors]
+
+ super
+ end
+
+ def reset
+ @pool.reset
+ end
+
+ private
+
+ def get_session(env, sid)
+ sid = generate_sid unless sid and !sid.empty?
+ begin
+ session = @pool.get(sid) || {}
+ rescue Dalli::DalliError => ex
+ # re-raise ArgumentError so Rails' session abstract_store.rb can autoload any missing models
+ raise ArgumentError, ex.message if ex.message =~ /unmarshal/
+ Rails.logger.warn("Session::DalliStore#get: #{ex.message}")
+ session = {}
+ end
+ [sid, session]
+ end
+
+ def set_session(env, sid, session_data, options = nil)
+ options ||= env[ENV_SESSION_OPTIONS_KEY]
+ expiry = options[:expire_after]
+ @pool.set(sid, session_data, expiry)
+ sid
+ rescue Dalli::DalliError
+ Rails.logger.warn("Session::DalliStore#set: #{$!.message}")
+ raise if @raise_errors
+ false
+ end
+
+ def destroy_session(env, session_id, options)
+ begin
+ @pool.delete(session_id)
+ rescue Dalli::DalliError
+ Rails.logger.warn("Session::DalliStore#destroy_session: #{$!.message}")
+ raise if @raise_errors
+ end
+ return nil if options[:drop]
+ generate_sid
+ end
+
+ def destroy(env)
+ if sid = current_session_id(env)
+ @pool.delete(sid)
+ end
+ rescue Dalli::DalliError
+ Rails.logger.warn("Session::DalliStore#destroy: #{$!.message}")
+ raise if @raise_errors
+ false
+ end
+
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/active_support/cache/dalli_store.rb b/lib/dalli-2.7.2/lib/active_support/cache/dalli_store.rb
new file mode 100644
index 000000000..fe4b80e3c
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/active_support/cache/dalli_store.rb
@@ -0,0 +1,363 @@
+# encoding: ascii
+require 'dalli'
+
+module ActiveSupport
+ module Cache
+ class DalliStore
+
+ attr_reader :silence, :options
+ alias_method :silence?, :silence
+
+ # Silence the logger.
+ def silence!
+ @silence = true
+ self
+ end
+
+ # Silence the logger within a block.
+ def mute
+ previous_silence, @silence = defined?(@silence) && @silence, true
+ yield
+ ensure
+ @silence = previous_silence
+ end
+
+ ESCAPE_KEY_CHARS = /[\x00-\x20%\x7F-\xFF]/
+
+ # Creates a new DalliStore object, with the given memcached server
+ # addresses. Each address is either a host name, or a host-with-port string
+ # in the form of "host_name:port". For example:
+ #
+ # ActiveSupport::Cache::DalliStore.new("localhost", "server-downstairs.localnetwork:8229")
+ #
+ # If no addresses are specified, then DalliStore will connect to
+ # localhost port 11211 (the default memcached port).
+ #
+ # Connection Pool support
+ #
+ # If you are using multithreaded Rails, the Rails.cache singleton can become a source
+ # of contention. You can use a connection pool of Dalli clients with Rails.cache by
+ # passing :pool_size and/or :pool_timeout:
+ #
+ # config.cache_store = :dalli_store, 'localhost:11211', :pool_size => 10
+ #
+ # Both pool options default to 5. You must include the `connection_pool` gem if you
+ # wish to use pool support.
+ #
+ def initialize(*addresses)
+ addresses = addresses.flatten
+ options = addresses.extract_options!
+ @options = options.dup
+
+ pool_options = {}
+ pool_options[:size] = options[:pool_size] if options[:pool_size]
+ pool_options[:timeout] = options[:pool_timeout] if options[:pool_timeout]
+
+ @options[:compress] ||= @options[:compression]
+
+ addresses.compact!
+ servers = if addresses.empty?
+ nil # use the default from Dalli::Client
+ else
+ addresses
+ end
+ if pool_options.empty?
+ @data = Dalli::Client.new(servers, @options)
+ else
+ @data = ::ConnectionPool.new(pool_options) { Dalli::Client.new(servers, @options.merge(:threadsafe => false)) }
+ end
+
+ extend Strategy::LocalCache
+ end
+
+ ##
+ # Access the underlying Dalli::Client or ConnectionPool instance for
+ # access to get_multi, etc.
+ def dalli
+ @data
+ end
+
+ def with(&block)
+ @data.with(&block)
+ end
+
+ def fetch(name, options=nil)
+ options ||= {}
+ name = expanded_key name
+
+ if block_given?
+ unless options[:force]
+ entry = instrument(:read, name, options) do |payload|
+ read_entry(name, options).tap do |result|
+ if payload
+ payload[:super_operation] = :fetch
+ payload[:hit] = !!result
+ end
+ end
+ end
+ end
+
+ if !entry.nil?
+ instrument(:fetch_hit, name, options) { |payload| }
+ entry
+ else
+ result = instrument(:generate, name, options) do |payload|
+ yield
+ end
+ write(name, result, options)
+ result
+ end
+ else
+ read(name, options)
+ end
+ end
+
+ def read(name, options=nil)
+ options ||= {}
+ name = expanded_key name
+
+ instrument(:read, name, options) do |payload|
+ entry = read_entry(name, options)
+ payload[:hit] = !!entry if payload
+ entry
+ end
+ end
+
+ def write(name, value, options=nil)
+ options ||= {}
+ name = expanded_key name
+
+ instrument(:write, name, options) do |payload|
+ with do |connection|
+ options = options.merge(:connection => connection)
+ write_entry(name, value, options)
+ end
+ end
+ end
+
+ def exist?(name, options=nil)
+ options ||= {}
+ name = expanded_key name
+
+ log(:exist, name, options)
+ !read_entry(name, options).nil?
+ end
+
+ def delete(name, options=nil)
+ options ||= {}
+ name = expanded_key name
+
+ instrument(:delete, name, options) do |payload|
+ delete_entry(name, options)
+ end
+ end
+
+ # Reads multiple keys from the cache using a single call to the
+ # servers for all keys. Keys must be Strings.
+ def read_multi(*names)
+ names.extract_options!
+ mapping = names.inject({}) { |memo, name| memo[expanded_key(name)] = name; memo }
+ instrument(:read_multi, names) do
+ results = {}
+ if local_cache
+ mapping.keys.each do |key|
+ if value = local_cache.read_entry(key, options)
+ results[key] = value
+ end
+ end
+ end
+
+ data = with { |c| c.get_multi(mapping.keys - results.keys) }
+ results.merge!(data)
+ results.inject({}) do |memo, (inner, _)|
+ entry = results[inner]
+ # NB Backwards data compatibility, to be removed at some point
+ value = (entry.is_a?(ActiveSupport::Cache::Entry) ? entry.value : entry)
+ memo[mapping[inner]] = value
+ local_cache.write_entry(inner, value, options) if local_cache
+ memo
+ end
+ end
+ end
+
+ # Fetches data from the cache, using the given keys. If there is data in
+ # the cache with the given keys, then that data is returned. Otherwise,
+ # the supplied block is called for each key for which there was no data,
+ # and the result will be written to the cache and returned.
+ def fetch_multi(*names)
+ options = names.extract_options!
+ mapping = names.inject({}) { |memo, name| memo[expanded_key(name)] = name; memo }
+
+ instrument(:fetch_multi, names) do
+ with do |connection|
+ results = connection.get_multi(mapping.keys)
+
+ connection.multi do
+ mapping.inject({}) do |memo, (expanded, name)|
+ memo[name] = results[expanded]
+ if memo[name].nil?
+ value = yield(name)
+ memo[name] = value
+ options = options.merge(:connection => connection)
+ write_entry(expanded, value, options)
+ end
+
+ memo
+ end
+ end
+ end
+ end
+ end
+
+ # Increment a cached value. This method uses the memcached incr atomic
+ # operator and can only be used on values written with the :raw option.
+ # Calling it on a value not stored with :raw will fail.
+ # :initial defaults to the amount passed in, as if the counter was initially zero.
+ # memcached counters cannot hold negative values.
+ def increment(name, amount = 1, options=nil)
+ options ||= {}
+ name = expanded_key name
+ initial = options.has_key?(:initial) ? options[:initial] : amount
+ expires_in = options[:expires_in]
+ instrument(:increment, name, :amount => amount) do
+ with { |c| c.incr(name, amount, expires_in, initial) }
+ end
+ rescue Dalli::DalliError => e
+ logger.error("DalliError: #{e.message}") if logger
+ raise if raise_errors?
+ nil
+ end
+
+ # Decrement a cached value. This method uses the memcached decr atomic
+ # operator and can only be used on values written with the :raw option.
+ # Calling it on a value not stored with :raw will fail.
+ # :initial defaults to zero, as if the counter was initially zero.
+ # memcached counters cannot hold negative values.
+ def decrement(name, amount = 1, options=nil)
+ options ||= {}
+ name = expanded_key name
+ initial = options.has_key?(:initial) ? options[:initial] : 0
+ expires_in = options[:expires_in]
+ instrument(:decrement, name, :amount => amount) do
+ with { |c| c.decr(name, amount, expires_in, initial) }
+ end
+ rescue Dalli::DalliError => e
+ logger.error("DalliError: #{e.message}") if logger
+ raise if raise_errors?
+ nil
+ end
+
+ # Clear the entire cache on all memcached servers. This method should
+ # be used with care when using a shared cache.
+ def clear(options=nil)
+ instrument(:clear, 'flushing all keys') do
+ with { |c| c.flush_all }
+ end
+ rescue Dalli::DalliError => e
+ logger.error("DalliError: #{e.message}") if logger
+ raise if raise_errors?
+ nil
+ end
+
+ # Clear any local cache
+ def cleanup(options=nil)
+ end
+
+ # Get the statistics from the memcached servers.
+ def stats
+ with { |c| c.stats }
+ end
+
+ def reset
+ with { |c| c.reset }
+ end
+
+ def logger
+ Dalli.logger
+ end
+
+ def logger=(new_logger)
+ Dalli.logger = new_logger
+ end
+
+ protected
+
+ # Read an entry from the cache.
+ def read_entry(key, options) # :nodoc:
+ entry = with { |c| c.get(key, options) }
+ # NB Backwards data compatibility, to be removed at some point
+ entry.is_a?(ActiveSupport::Cache::Entry) ? entry.value : entry
+ rescue Dalli::DalliError => e
+ logger.error("DalliError: #{e.message}") if logger
+ raise if raise_errors?
+ nil
+ end
+
+ # Write an entry to the cache.
+ def write_entry(key, value, options) # :nodoc:
+ # cleanup LocalCache
+ cleanup if options[:unless_exist]
+ method = options[:unless_exist] ? :add : :set
+ expires_in = options[:expires_in]
+ connection = options.delete(:connection)
+ connection.send(method, key, value, expires_in, options)
+ rescue Dalli::DalliError => e
+ logger.error("DalliError: #{e.message}") if logger
+ raise if raise_errors?
+ false
+ end
+
+ # Delete an entry from the cache.
+ def delete_entry(key, options) # :nodoc:
+ with { |c| c.delete(key) }
+ rescue Dalli::DalliError => e
+ logger.error("DalliError: #{e.message}") if logger
+ raise if raise_errors?
+ false
+ end
+
+ private
+ # Expand key to be a consistent string value. Invoke +cache_key+ if
+ # object responds to +cache_key+. Otherwise, to_param method will be
+ # called. If the key is a Hash, then keys will be sorted alphabetically.
+ def expanded_key(key) # :nodoc:
+ return key.cache_key.to_s if key.respond_to?(:cache_key)
+
+ case key
+ when Array
+ if key.size > 1
+ key = key.collect{|element| expanded_key(element)}
+ else
+ key = key.first
+ end
+ when Hash
+ key = key.sort_by { |k,_| k.to_s }.collect{|k,v| "#{k}=#{v}"}
+ end
+
+ key = key.to_param
+ if key.respond_to? :force_encoding
+ key = key.dup
+ key.force_encoding('binary')
+ end
+ key
+ end
+
+ def instrument(operation, key, options=nil)
+ log(operation, key, options)
+
+ payload = { :key => key }
+ payload.merge!(options) if options.is_a?(Hash)
+ ActiveSupport::Notifications.instrument("cache_#{operation}.active_support", payload){ yield(payload) }
+ end
+
+ def log(operation, key, options=nil)
+ return unless logger && logger.debug? && !silence?
+ logger.debug("Cache #{operation}: #{key}#{options.blank? ? "" : " (#{options.inspect})"}")
+ end
+
+ def raise_errors?
+ !!@options[:raise_errors]
+ end
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli.rb b/lib/dalli-2.7.2/lib/dalli.rb
new file mode 100644
index 000000000..62e3b42fe
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli.rb
@@ -0,0 +1,46 @@
+require 'dalli/compressor'
+require 'dalli/client'
+require 'dalli/ring'
+require 'dalli/server'
+require 'dalli/socket'
+require 'dalli/version'
+require 'dalli/options'
+require 'dalli/railtie' if defined?(::Rails::Railtie)
+
+module Dalli
+ # generic error
+ class DalliError < RuntimeError; end
+ # socket/server communication error
+ class NetworkError < DalliError; end
+ # no server available/alive error
+ class RingError < DalliError; end
+ # application error in marshalling serialization
+ class MarshalError < DalliError; end
+ # application error in marshalling deserialization or decompression
+ class UnmarshalError < DalliError; end
+
+ def self.logger
+ @logger ||= (rails_logger || default_logger)
+ end
+
+ def self.rails_logger
+ (defined?(Rails) && Rails.respond_to?(:logger) && Rails.logger) ||
+ (defined?(RAILS_DEFAULT_LOGGER) && RAILS_DEFAULT_LOGGER.respond_to?(:debug) && RAILS_DEFAULT_LOGGER)
+ end
+
+ def self.default_logger
+ require 'logger'
+ l = Logger.new(STDOUT)
+ l.level = Logger::INFO
+ l
+ end
+
+ def self.logger=(logger)
+ @logger = logger
+ end
+
+end
+
+if defined?(RAILS_VERSION) && RAILS_VERSION < '3'
+ raise Dalli::DalliError, "Dalli #{Dalli::VERSION} does not support Rails version < 3.0"
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/cas/client.rb b/lib/dalli-2.7.2/lib/dalli/cas/client.rb
new file mode 100644
index 000000000..33295b474
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/cas/client.rb
@@ -0,0 +1,58 @@
+require 'dalli/client'
+
+module Dalli
+ class Client
+ ##
+ # Get the value and CAS ID associated with the key. If a block is provided,
+ # value and CAS will be passed to the block.
+ def get_cas(key)
+ (value, cas) = perform(:cas, key)
+ value = (!value || value == 'Not found') ? nil : value
+ if block_given?
+ yield value, cas
+ else
+ [value, cas]
+ end
+ end
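+
+ # Optimistic-locking sketch using the CAS helpers (requires
+ # require 'dalli/cas/client'; assumes a reachable memcached):
+ #
+ #   client = Dalli::Client.new('localhost:11211')
+ #   client.set('color', 'blue')
+ #   value, cas = client.get_cas('color')    # => ['blue', <cas id>]
+ #   client.set_cas('color', 'green', cas)   # truthy only if 'color' unchanged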
+
+ ##
+ # Fetch multiple keys efficiently, including available metadata such as CAS.
+ # If a block is given, yields key/data pairs one at a time. Data is an array:
+ # [value, cas_id]
+ # If no block is given, returns a hash of
+ # { 'key' => [value, cas_id] }
+ def get_multi_cas(*keys)
+ if block_given?
+ get_multi_yielder(keys) {|*args| yield(*args)}
+ else
+ Hash.new.tap do |hash|
+ get_multi_yielder(keys) {|k, data| hash[k] = data}
+ end
+ end
+ end
+
+ ##
+ # Set the key-value pair, verifying existing CAS.
+ # Returns the resulting CAS value if succeeded, and falsy otherwise.
+ def set_cas(key, value, cas, ttl=nil, options=nil)
+ ttl ||= @options[:expires_in].to_i
+ perform(:set, key, value, ttl, cas, options)
+ end
+
+ ##
+ # Conditionally set the key/value pair, verifying the existing CAS, only if the
+ # key already exists on the server. Returns the new CAS value if the
+ # operation succeeded, or falsy otherwise.
+ def replace_cas(key, value, cas, ttl=nil, options=nil)
+ ttl ||= @options[:expires_in].to_i
+ perform(:replace, key, value, ttl, cas, options)
+ end
+
+ # Delete a key/value pair, verifying existing CAS.
+ # Returns true if succeeded, and falsy otherwise.
+ def delete_cas(key, cas=0)
+ perform(:delete, key, cas)
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/client.rb b/lib/dalli-2.7.2/lib/dalli/client.rb
new file mode 100644
index 000000000..6d65b61b2
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/client.rb
@@ -0,0 +1,439 @@
+require 'digest/md5'
+require 'set'
+
+# encoding: ascii
+module Dalli
+ class Client
+
+ ##
+ # Dalli::Client is the main class which developers will use to interact with
+ # the memcached server. Usage:
+ #
+ # Dalli::Client.new(['localhost:11211:10', 'cache-2.example.com:11211:5', '192.168.0.1:22122:5'],
+ # :threadsafe => true, :failover => true, :expires_in => 300)
+ #
+ # servers is an Array of "host:port:weight" where weight allows you to distribute cache unevenly.
+ # Both weight and port are optional. If you pass in nil, Dalli will use the MEMCACHE_SERVERS
+ # environment variable or default to 'localhost:11211' if it is not present.
+ #
+ # Options:
+ # - :namespace - prepend each key with this value to provide simple namespacing.
+ # - :failover - if a server is down, look for and store values on another server in the ring. Default: true.
+ # - :threadsafe - ensure that only one thread is actively using a socket at a time. Default: true.
+ # - :expires_in - default TTL in seconds if you do not pass TTL as a parameter to an individual operation, defaults to 0 or forever
+ # - :compress - defaults to false, if true Dalli will compress values larger than 1024 bytes before sending them to memcached.
+ # - :serializer - defaults to Marshal
+ # - :compressor - defaults to zlib
+ #
+ def initialize(servers=nil, options={})
+ @servers = normalize_servers(servers || ENV["MEMCACHE_SERVERS"] || '127.0.0.1:11211')
+ @options = normalize_options(options)
+ @ring = nil
+ end
+
+ #
+ # The standard memcached instruction set
+ #
+
+ ##
+ # Turn on quiet aka noreply support.
+ # All relevant operations within this block will be effectively
+ # pipelined as Dalli will use 'quiet' operations where possible.
+ # Currently supports the set, add, replace and delete operations.
+ def multi
+ old, Thread.current[:dalli_multi] = Thread.current[:dalli_multi], true
+ yield
+ ensure
+ Thread.current[:dalli_multi] = old
+ end
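+
+ # Sketch of a quiet/pipelined batch (client is any Dalli::Client):
+ #
+ #   client.multi do
+ #     client.set('a', 1)      # sent as setq; no per-operation response read
+ #     client.set('b', 2)
+ #     client.delete('stale')  # sent as deleteq
+ #   end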
+
+ ##
+ # Get the value associated with the key.
+ def get(key, options=nil)
+ perform(:get, key)
+ end
+
+ ##
+ # Fetch multiple keys efficiently.
+ # If a block is given, yields key/value pairs one at a time.
+ # Otherwise returns a hash of { 'key' => 'value', 'key2' => 'value2' }
+ def get_multi(*keys)
+ if block_given?
+ get_multi_yielder(keys) {|k, data| yield k, data.first}
+ else
+ Hash.new.tap do |hash|
+ get_multi_yielder(keys) {|k, data| hash[k] = data.first}
+ end
+ end
+ end
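+
+ # Sketch (client is any Dalli::Client); keys that are missing on the server
+ # are simply omitted from the result:
+ #
+ #   client.get_multi('a', 'b', 'nope')   # => { 'a' => 1, 'b' => 2 }
+ #   client.get_multi('a', 'b') { |key, value| puts "#{key}=#{value}" }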
+
+ def fetch(key, ttl=nil, options=nil)
+ ttl ||= @options[:expires_in].to_i
+ val = get(key, options)
+ if val.nil? && block_given?
+ val = yield
+ add(key, val, ttl, options)
+ end
+ val
+ end
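+
+ # Fetch-with-block sketch: the block runs and its result is stored via #add
+ # only on a cache miss (expensive_report is a hypothetical helper here):
+ #
+ #   client.fetch('report:today', 300) { expensive_report }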
+
+ ##
+ # compare and swap values using optimistic locking.
+ # Fetch the existing value for key.
+ # If it exists, yield the value to the block.
+ # Add the block's return value as the new value for the key.
+ # Add will fail if someone else changed the value.
+ #
+ # Returns:
+ # - nil if the key did not exist.
+ # - false if the value was changed by someone else.
+ # - true if the value was successfully updated.
+ def cas(key, ttl=nil, options=nil, &block)
+ ttl ||= @options[:expires_in].to_i
+ (value, cas) = perform(:cas, key)
+ value = (!value || value == 'Not found') ? nil : value
+ if value
+ newvalue = block.call(value)
+ perform(:set, key, newvalue, ttl, cas, options)
+ end
+ end
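+
+ # Compare-and-swap sketch (client is any Dalli::Client):
+ #
+ #   client.set('tags', ['a'])
+ #   client.cas('tags') { |tags| tags << 'b' }
+ #   # => new CAS id (truthy) on success, false if another writer won the
+ #   #    race, nil if the key did not exist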
+
+ def set(key, value, ttl=nil, options=nil)
+ ttl ||= @options[:expires_in].to_i
+ perform(:set, key, value, ttl, 0, options)
+ end
+
+ ##
+ # Conditionally add a key/value pair, if the key does not already exist
+ # on the server. Returns truthy if the operation succeeded.
+ def add(key, value, ttl=nil, options=nil)
+ ttl ||= @options[:expires_in].to_i
+ perform(:add, key, value, ttl, options)
+ end
+
+ ##
+ # Conditionally add a key/value pair, only if the key already exists
+ # on the server. Returns truthy if the operation succeeded.
+ def replace(key, value, ttl=nil, options=nil)
+ ttl ||= @options[:expires_in].to_i
+ perform(:replace, key, value, ttl, 0, options)
+ end
+
+ def delete(key)
+ perform(:delete, key, 0)
+ end
+
+ ##
+ # Append value to the value already stored on the server for 'key'.
+ # Appending only works for values stored with :raw => true.
+ def append(key, value)
+ perform(:append, key, value.to_s)
+ end
+
+ ##
+ # Prepend value to the value already stored on the server for 'key'.
+ # Prepending only works for values stored with :raw => true.
+ def prepend(key, value)
+ perform(:prepend, key, value.to_s)
+ end
+
+ def flush(delay=0)
+ time = -delay
+ ring.servers.map { |s| s.request(:flush, time += delay) }
+ end
+
+ alias_method :flush_all, :flush
+
+ ##
+ # Incr adds the given amount to the counter on the memcached server.
+ # Amt must be a positive integer value.
+ #
+ # If default is nil, the counter must already exist or the operation
+ # will fail and will return nil. Otherwise this method will return
+ # the new value for the counter.
+ #
+ # Note that the ttl will only apply if the counter does not already
+ # exist. To increase an existing counter and update its TTL, use
+ # #cas.
+ def incr(key, amt=1, ttl=nil, default=nil)
+ raise ArgumentError, "Positive values only: #{amt}" if amt < 0
+ ttl ||= @options[:expires_in].to_i
+ perform(:incr, key, amt.to_i, ttl, default)
+ end
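+
+ # Counter sketch: without a default the key must already exist; passing a
+ # default creates the counter at that value on first use:
+ #
+ #   client.incr('hits')            # => nil unless 'hits' already exists
+ #   client.incr('hits', 1, 0, 0)   # => 0, then 1, 2, ... on later calls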
+
+ ##
+ # Decr subtracts the given amount from the counter on the memcached server.
+ # Amt must be a positive integer value.
+ #
+ # memcached counters are unsigned and cannot hold negative values. Calling
+ # decr on a counter which is 0 will just return 0.
+ #
+ # If default is nil, the counter must already exist or the operation
+ # will fail and will return nil. Otherwise this method will return
+ # the new value for the counter.
+ #
+ # Note that the ttl will only apply if the counter does not already
+ # exist. To decrease an existing counter and update its TTL, use
+ # #cas.
+ def decr(key, amt=1, ttl=nil, default=nil)
+ raise ArgumentError, "Positive values only: #{amt}" if amt < 0
+ ttl ||= @options[:expires_in].to_i
+ perform(:decr, key, amt.to_i, ttl, default)
+ end
+
+ ##
+ # Touch updates expiration time for a given key.
+ #
+ # Returns true if key exists, otherwise nil.
+ def touch(key, ttl=nil)
+ ttl ||= @options[:expires_in].to_i
+ resp = perform(:touch, key, ttl)
+ resp.nil? ? nil : true
+ end
+
+ ##
+ # Collect the stats for each server.
+ # You can optionally pass a type including :items or :slabs to get specific stats
+ # Returns a hash like { 'hostname:port' => { 'stat1' => 'value1', ... }, 'hostname2:port' => { ... } }
+ def stats(type=nil)
+ type = nil if ![nil, :items,:slabs].include? type
+ values = {}
+ ring.servers.each do |server|
+ values["#{server.hostname}:#{server.port}"] = server.alive? ? server.request(:stats,type.to_s) : nil
+ end
+ values
+ end
+
+ ##
+ # Reset stats for each server.
+ def reset_stats
+ ring.servers.map do |server|
+ server.alive? ? server.request(:reset_stats) : nil
+ end
+ end
+
+ ##
+ ## Make sure memcache servers are alive, or raise a Dalli::RingError
+ def alive!
+ ring.server_for_key("")
+ end
+
+ ##
+ ## Version of the memcache servers.
+ def version
+ values = {}
+ ring.servers.each do |server|
+ values["#{server.hostname}:#{server.port}"] = server.alive? ? server.request(:version) : nil
+ end
+ values
+ end
+
+ ##
+ # Close our connection to each server.
+ # If you perform another operation after this, the connections will be re-established.
+ def close
+ if @ring
+ @ring.servers.each { |s| s.close }
+ @ring = nil
+ end
+ end
+ alias_method :reset, :close
+
+ # Stub method so a bare Dalli client can pretend to be a connection pool.
+ def with
+ yield self
+ end
+
+ private
+
+ def groups_for_keys(*keys)
+ groups = mapped_keys(keys).flatten.group_by do |key|
+ begin
+ ring.server_for_key(key)
+ rescue Dalli::RingError
+ Dalli.logger.debug { "unable to get key #{key}" }
+ nil
+ end
+ end
+ return groups
+ end
+
+ def mapped_keys(keys)
+ keys.flatten.map {|a| validate_key(a.to_s)}
+ end
+
+ def make_multi_get_requests(groups)
+ groups.each do |server, keys_for_server|
+ begin
+ # TODO: do this with the perform chokepoint?
+ # But given the fact that fetching the response doesn't take place
+ # in that slot it's misleading anyway. Need to move all of this method
+ # into perform to be meaningful
+ server.request(:send_multiget, keys_for_server)
+ rescue DalliError, NetworkError => e
+ Dalli.logger.debug { e.inspect }
+ Dalli.logger.debug { "unable to get keys for server #{server.hostname}:#{server.port}" }
+ end
+ end
+ end
+
+ def perform_multi_response_start(servers)
+ servers.each do |server|
+ next unless server.alive?
+ begin
+ server.multi_response_start
+ rescue DalliError, NetworkError => e
+ Dalli.logger.debug { e.inspect }
+ Dalli.logger.debug { "results from this server will be missing" }
+ servers.delete(server)
+ end
+ end
+ servers
+ end
+
+ ##
+ # Normalizes the argument into an array of servers. If the argument is a string, it's expected to be of
+ # the format "memcache1.example.com:11211[,memcache2.example.com:11211[,memcache3.example.com:11211[...]]]
+ def normalize_servers(servers)
+ if servers.is_a? String
+ return servers.split(",")
+ else
+ return servers
+ end
+ end
+
+ def ring
+ @ring ||= Dalli::Ring.new(
+ @servers.map do |s|
+ server_options = {}
+ if s =~ %r{\Amemcached://}
+ uri = URI.parse(s)
+ server_options[:username] = uri.user
+ server_options[:password] = uri.password
+ s = "#{uri.host}:#{uri.port}"
+ end
+ Dalli::Server.new(s, @options.merge(server_options))
+ end, @options
+ )
+ end
+
+ # Chokepoint method for instrumentation
+ def perform(*all_args, &blk)
+ return blk.call if blk
+ op, key, *args = *all_args
+
+ key = key.to_s
+ key = validate_key(key)
+ begin
+ server = ring.server_for_key(key)
+ ret = server.request(op, key, *args)
+ ret
+ rescue NetworkError => e
+ Dalli.logger.debug { e.inspect }
+ Dalli.logger.debug { "retrying request with new server" }
+ retry
+ end
+ end
+
+ def validate_key(key)
+ raise ArgumentError, "key cannot be blank" if !key || key.length == 0
+ key = key_with_namespace(key)
+ if key.length > 250
+ max_length_before_namespace = 212 - (namespace || '').size
+ key = "#{key[0, max_length_before_namespace]}:md5:#{Digest::MD5.hexdigest(key)}"
+ end
+ return key
+ end
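+
+ # Illustrative behaviour: with :namespace => 'app', the key 'user:1' becomes
+ # "app:user:1"; keys longer than 250 bytes are truncated and suffixed with
+ # ":md5:<hexdigest>" so they stay within memcached's key-length limit.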
+
+ def key_with_namespace(key)
+ (ns = namespace) ? "#{ns}:#{key}" : key
+ end
+
+ def key_without_namespace(key)
+ (ns = namespace) ? key.sub(%r(\A#{ns}:), '') : key
+ end
+
+ def namespace
+ return nil unless @options[:namespace]
+ @options[:namespace].is_a?(Proc) ? @options[:namespace].call.to_s : @options[:namespace].to_s
+ end
+
+ def normalize_options(opts)
+ if opts[:compression]
+ Dalli.logger.warn "DEPRECATED: Dalli's :compression option is now just :compress => true. Please update your configuration."
+ opts[:compress] = opts.delete(:compression)
+ end
+ begin
+ opts[:expires_in] = opts[:expires_in].to_i if opts[:expires_in]
+ rescue NoMethodError
+ raise ArgumentError, "cannot convert :expires_in => #{opts[:expires_in].inspect} to an integer"
+ end
+ opts
+ end
+
+ ##
+ # Yields, one at a time, keys and their values+attributes.
+ def get_multi_yielder(keys)
+ perform do
+ return {} if keys.empty?
+ ring.lock do
+ begin
+ groups = groups_for_keys(keys)
+ if unfound_keys = groups.delete(nil)
+ Dalli.logger.debug { "unable to get keys for #{unfound_keys.length} keys because no matching server was found" }
+ end
+ make_multi_get_requests(groups)
+
+ servers = groups.keys
+ return if servers.empty?
+ servers = perform_multi_response_start(servers)
+
+ start = Time.now
+ loop do
+ # remove any dead servers
+ servers.delete_if { |s| s.sock.nil? }
+ break if servers.empty?
+
+ # calculate remaining timeout
+ elapsed = Time.now - start
+ timeout = servers.first.options[:socket_timeout]
+ if elapsed > timeout
+ readable = nil
+ else
+ sockets = servers.map(&:sock)
+ readable, _ = IO.select(sockets, nil, nil, timeout - elapsed)
+ end
+
+ if readable.nil?
+ # no response within timeout; abort pending connections
+ servers.each do |server|
+ Dalli.logger.debug { "memcached at #{server.name} did not response within timeout" }
+ server.multi_response_abort
+ end
+ break
+
+ else
+ readable.each do |sock|
+ server = sock.server
+
+ begin
+ server.multi_response_nonblock.each_pair do |key, value_list|
+ yield key_without_namespace(key), value_list
+ end
+
+ if server.multi_response_completed?
+ servers.delete(server)
+ end
+ rescue NetworkError
+ servers.delete(server)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/compressor.rb b/lib/dalli-2.7.2/lib/dalli/compressor.rb
new file mode 100644
index 000000000..353593ab9
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/compressor.rb
@@ -0,0 +1,29 @@
+require 'zlib'
+require 'stringio'
+
+module Dalli
+ class Compressor
+ def self.compress(data)
+ Zlib::Deflate.deflate(data)
+ end
+
+ def self.decompress(data)
+ Zlib::Inflate.inflate(data)
+ end
+ end
+
+ class GzipCompressor
+ def self.compress(data)
+ io = StringIO.new("", "w")
+ gz = Zlib::GzipWriter.new(io)
+ gz.write(data)
+ gz.close
+ io.string
+ end
+
+ def self.decompress(data)
+ io = StringIO.new(data, "rb")
+ Zlib::GzipReader.new(io).read
+ end
+ end
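+
+ # To use gzip instead of raw deflate, pass the compressor explicitly; a
+ # sketch, assuming a local memcached:
+ #
+ #   Dalli::Client.new('localhost:11211', :compress => true,
+ #                     :compressor => Dalli::GzipCompressor)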
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/options.rb b/lib/dalli-2.7.2/lib/dalli/options.rb
new file mode 100644
index 000000000..b3c405203
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/options.rb
@@ -0,0 +1,64 @@
+require 'thread'
+require 'monitor'
+
+module Dalli
+
+ # Make Dalli threadsafe by using a lock around all
+ # public server methods.
+ #
+ # Dalli::Server.extend(Dalli::Threadsafe)
+ #
+ module Threadsafe
+ def self.extended(obj)
+ obj.init_threadsafe
+ end
+
+ def request(op, *args)
+ @lock.synchronize do
+ super
+ end
+ end
+
+ def alive?
+ @lock.synchronize do
+ super
+ end
+ end
+
+ def close
+ @lock.synchronize do
+ super
+ end
+ end
+
+ def multi_response_start
+ @lock.synchronize do
+ super
+ end
+ end
+
+ def multi_response_nonblock
+ @lock.synchronize do
+ super
+ end
+ end
+
+ def multi_response_abort
+ @lock.synchronize do
+ super
+ end
+ end
+
+ def lock!
+ @lock.mon_enter
+ end
+
+ def unlock!
+ @lock.mon_exit
+ end
+
+ def init_threadsafe
+ @lock = Monitor.new
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/railtie.rb b/lib/dalli-2.7.2/lib/dalli/railtie.rb
new file mode 100644
index 000000000..9c5d5d2b9
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/railtie.rb
@@ -0,0 +1,7 @@
+module Dalli
+ class Railtie < ::Rails::Railtie
+ config.before_configuration do
+ config.cache_store = :dalli_store
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/ring.rb b/lib/dalli-2.7.2/lib/dalli/ring.rb
new file mode 100644
index 000000000..11c234021
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/ring.rb
@@ -0,0 +1,142 @@
+require 'digest/sha1'
+require 'zlib'
+
+module Dalli
+ class Ring
+ POINTS_PER_SERVER = 160 # this is the default in libmemcached
+
+ attr_accessor :servers, :continuum
+
+ def initialize(servers, options)
+ @servers = servers
+ @continuum = nil
+ if servers.size > 1
+ total_weight = servers.inject(0) { |memo, srv| memo + srv.weight }
+ continuum = []
+ servers.each do |server|
+ entry_count_for(server, servers.size, total_weight).times do |idx|
+ hash = Digest::SHA1.hexdigest("#{server.hostname}:#{server.port}:#{idx}")
+ value = Integer("0x#{hash[0..7]}")
+ continuum << Dalli::Ring::Entry.new(value, server)
+ end
+ end
+ @continuum = continuum.sort { |a, b| a.value <=> b.value }
+ end
+
+ threadsafe! unless options[:threadsafe] == false
+ @failover = options[:failover] != false
+ end
+
+ def server_for_key(key)
+ if @continuum
+ hkey = hash_for(key)
+ 20.times do |try|
+ entryidx = binary_search(@continuum, hkey)
+ server = @continuum[entryidx].server
+ return server if server.alive?
+ break unless @failover
+ hkey = hash_for("#{try}#{key}")
+ end
+ else
+ server = @servers.first
+ return server if server && server.alive?
+ end
+
+ raise Dalli::RingError, "No server available"
+ end
+
+ def lock
+ @servers.each { |s| s.lock! }
+ begin
+ return yield
+ ensure
+ @servers.each { |s| s.unlock! }
+ end
+ end
+
+ private
+
+ def threadsafe!
+ @servers.each do |s|
+ s.extend(Dalli::Threadsafe)
+ end
+ end
+
+ def hash_for(key)
+ Zlib.crc32(key)
+ end
+
+ def entry_count_for(server, total_servers, total_weight)
+ ((total_servers * POINTS_PER_SERVER * server.weight) / Float(total_weight)).floor
+ end
+
+ # Native extension to perform the binary search within the continuum
+ # space. Falls back to a pure Ruby version if the compilation doesn't work.
+ # The extension is optional, purely a performance win, and only matters when
+ # you are using multiple memcached servers.
+ begin
+ require 'inline'
+ inline do |builder|
+ builder.c <<-EOM
+ int binary_search(VALUE ary, unsigned int r) {
+ long upper = RARRAY_LEN(ary) - 1;
+ long lower = 0;
+ long idx = 0;
+ ID value = rb_intern("value");
+ VALUE continuumValue;
+ unsigned int l;
+
+ while (lower <= upper) {
+ idx = (lower + upper) / 2;
+
+ continuumValue = rb_funcall(RARRAY_PTR(ary)[idx], value, 0);
+ l = NUM2UINT(continuumValue);
+ if (l == r) {
+ return idx;
+ }
+ else if (l > r) {
+ upper = idx - 1;
+ }
+ else {
+ lower = idx + 1;
+ }
+ }
+ return upper;
+ }
+ EOM
+ end
+ rescue LoadError
+ # Find the closest index in the Ring with value <= the given value
+ def binary_search(ary, value)
+ upper = ary.size - 1
+ lower = 0
+ idx = 0
+
+ while (lower <= upper) do
+ idx = (lower + upper) / 2
+ comp = ary[idx].value <=> value
+
+ if comp == 0
+ return idx
+ elsif comp > 0
+ upper = idx - 1
+ else
+ lower = idx + 1
+ end
+ end
+ return upper
+ end
+ end
+
+ class Entry
+ attr_reader :value
+ attr_reader :server
+
+ def initialize(val, srv)
+ @value = val
+ @server = srv
+ end
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/server.rb b/lib/dalli-2.7.2/lib/dalli/server.rb
new file mode 100644
index 000000000..38a4fb62d
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/server.rb
@@ -0,0 +1,696 @@
+require 'socket'
+require 'timeout'
+
+module Dalli
+ class Server
+ attr_accessor :hostname
+ attr_accessor :port
+ attr_accessor :weight
+ attr_accessor :options
+ attr_reader :sock
+
+ DEFAULTS = {
+ # seconds between trying to contact a remote server
+ :down_retry_delay => 1,
+ # connect/read/write timeout for socket operations
+ :socket_timeout => 0.5,
+ # times a socket operation may fail before considering the server dead
+ :socket_max_failures => 2,
+ # amount of time to sleep between retries when a failure occurs
+ :socket_failure_delay => 0.01,
+ # max size of value in bytes (default is 1 MB, can be overridden with "memcached -I ")
+ :value_max_bytes => 1024 * 1024,
+ :compressor => Compressor,
+ # min byte size to attempt compression
+ :compression_min_size => 1024,
+ # max byte size for compression
+ :compression_max_size => false,
+ :serializer => Marshal,
+ :username => nil,
+ :password => nil,
+ :keepalive => true
+ }
+
+ def initialize(attribs, options = {})
+ (@hostname, @port, @weight) = parse_hostname(attribs)
+ @port ||= 11211
+ @port = Integer(@port)
+ @weight ||= 1
+ @weight = Integer(@weight)
+ @fail_count = 0
+ @down_at = nil
+ @last_down_at = nil
+ @options = DEFAULTS.merge(options)
+ @sock = nil
+ @msg = nil
+ @error = nil
+ @pid = nil
+ @inprogress = nil
+ end
+
+ def name
+ "#{@hostname}:#{@port}"
+ end
+
+ # Chokepoint method for instrumentation
+ def request(op, *args)
+ verify_state
+ raise Dalli::NetworkError, "#{hostname}:#{port} is down: #{@error} #{@msg}. If you are sure it is running, ensure memcached version is > 1.4." unless alive?
+ begin
+ send(op, *args)
+ rescue Dalli::NetworkError
+ raise
+ rescue Dalli::MarshalError => ex
+ Dalli.logger.error "Marshalling error for key '#{args.first}': #{ex.message}"
+ Dalli.logger.error "You are trying to cache a Ruby object which cannot be serialized to memcached."
+ Dalli.logger.error ex.backtrace.join("\n\t")
+ false
+ rescue Dalli::DalliError
+ raise
+ rescue => ex
+ Dalli.logger.error "Unexpected exception in Dalli: #{ex.class.name}: #{ex.message}"
+ Dalli.logger.error "This is a bug in Dalli, please enter an issue in Github if it does not already exist."
+ Dalli.logger.error ex.backtrace.join("\n\t")
+ down!
+ end
+ end
+
+ def alive?
+ return true if @sock
+
+ if @last_down_at && @last_down_at + options[:down_retry_delay] >= Time.now
+ time = @last_down_at + options[:down_retry_delay] - Time.now
+ Dalli.logger.debug { "down_retry_delay not reached for #{hostname}:#{port} (%.3f seconds left)" % time }
+ return false
+ end
+
+ connect
+ !!@sock
+ rescue Dalli::NetworkError
+ false
+ end
+
+ def close
+ return unless @sock
+ @sock.close rescue nil
+ @sock = nil
+ @pid = nil
+ @inprogress = false
+ end
+
+ def lock!
+ end
+
+ def unlock!
+ end
+
+ def serializer
+ @options[:serializer]
+ end
+
+ def compressor
+ @options[:compressor]
+ end
+
+ # Start reading key/value pairs from this connection. This is usually called
+ # after a series of GETKQ commands. A NOOP is sent, and the server begins
+ # flushing responses for kv pairs that were found.
+ #
+ # Returns nothing.
+ def multi_response_start
+ verify_state
+ write_noop
+ @multi_buffer = ''
+ @position = 0
+ @inprogress = true
+ end
+
+ # Did the last call to #multi_response_start complete successfully?
+ def multi_response_completed?
+ @multi_buffer.nil?
+ end
+
+ # Attempt to receive and parse as many key/value pairs as possible
+ # from this server. After #multi_response_start, this should be invoked
+ # repeatedly whenever this server's socket is readable until
+ # #multi_response_completed?.
+ #
+ # Returns a Hash of kv pairs received.
+ def multi_response_nonblock
+ raise 'multi_response has completed' if @multi_buffer.nil?
+
+ @multi_buffer << @sock.read_available
+ buf = @multi_buffer
+ pos = @position
+ values = {}
+
+ while buf.bytesize - pos >= 24
+ header = buf.slice(pos, 24)
+ (key_length, _, body_length, cas) = header.unpack(KV_HEADER)
+
+ if key_length == 0
+ # all done!
+ @multi_buffer = nil
+ @position = nil
+ @inprogress = false
+ break
+
+ elsif buf.bytesize - pos >= 24 + body_length
+ flags = buf.slice(pos + 24, 4).unpack('N')[0]
+ key = buf.slice(pos + 24 + 4, key_length)
+ value = buf.slice(pos + 24 + 4 + key_length, body_length - key_length - 4) if body_length - key_length - 4 > 0
+
+ pos = pos + 24 + body_length
+
+ begin
+ values[key] = [deserialize(value, flags), cas]
+ rescue DalliError
+ end
+
+ else
+ # not enough data yet, wait for more
+ break
+ end
+ end
+ @position = pos
+
+ values
+ rescue SystemCallError, Timeout::Error, EOFError => e
+ failure!(e)
+ end
+
+ # Abort an earlier #multi_response_start. Used to signal an external
+ # timeout. The underlying socket is disconnected, and the exception is
+ # swallowed.
+ #
+ # Returns nothing.
+ def multi_response_abort
+ @multi_buffer = nil
+ @position = nil
+ @inprogress = false
+ failure!(RuntimeError.new('External timeout'))
+ rescue NetworkError
+ true
+ end
+
+ # NOTE: Additional public methods should be overridden in Dalli::Threadsafe
+
+ private
+
+ def verify_state
+ failure!(RuntimeError.new('Already writing to socket')) if @inprogress
+ failure!(RuntimeError.new('Cannot share client between multiple processes')) if @pid && @pid != Process.pid
+ end
+
+ def failure!(exception)
+ message = "#{hostname}:#{port} failed (count: #{@fail_count}) #{exception.class}: #{exception.message}"
+ Dalli.logger.info { message }
+
+ @fail_count += 1
+ if @fail_count >= options[:socket_max_failures]
+ down!
+ else
+ close
+ sleep(options[:socket_failure_delay]) if options[:socket_failure_delay]
+ raise Dalli::NetworkError, "Socket operation failed, retrying..."
+ end
+ end
+
+ def down!
+ close
+
+ @last_down_at = Time.now
+
+ if @down_at
+ time = Time.now - @down_at
+ Dalli.logger.debug { "#{hostname}:#{port} is still down (for %.3f seconds now)" % time }
+ else
+ @down_at = @last_down_at
+ Dalli.logger.warn { "#{hostname}:#{port} is down" }
+ end
+
+ @error = $! && $!.class.name
+ @msg = @msg || ($! && $!.message && !$!.message.empty? && $!.message)
+ raise Dalli::NetworkError, "#{hostname}:#{port} is down: #{@error} #{@msg}"
+ end
+
+ def up!
+ if @down_at
+ time = Time.now - @down_at
+ Dalli.logger.warn { "#{hostname}:#{port} is back (downtime was %.3f seconds)" % time }
+ end
+
+ @fail_count = 0
+ @down_at = nil
+ @last_down_at = nil
+ @msg = nil
+ @error = nil
+ end
+
+ def multi?
+ Thread.current[:dalli_multi]
+ end
+
+ def get(key)
+ req = [REQUEST, OPCODES[:get], key.bytesize, 0, 0, 0, key.bytesize, 0, 0, key].pack(FORMAT[:get])
+ write(req)
+ generic_response(true)
+ end
+
+ def send_multiget(keys)
+ req = ""
+ keys.each do |key|
+ req << [REQUEST, OPCODES[:getkq], key.bytesize, 0, 0, 0, key.bytesize, 0, 0, key].pack(FORMAT[:getkq])
+ end
+ # Could send noop here instead of in multi_response_start
+ write(req)
+ end
+
+ def set(key, value, ttl, cas, options)
+ (value, flags) = serialize(key, value, options)
+ ttl = sanitize_ttl(ttl)
+
+ guard_max_value(key, value) do
+ req = [REQUEST, OPCODES[multi? ? :setq : :set], key.bytesize, 8, 0, 0, value.bytesize + key.bytesize + 8, 0, cas, flags, ttl, key, value].pack(FORMAT[:set])
+ write(req)
+ cas_response unless multi?
+ end
+ end
+
+ def add(key, value, ttl, options)
+ (value, flags) = serialize(key, value, options)
+ ttl = sanitize_ttl(ttl)
+
+ guard_max_value(key, value) do
+ req = [REQUEST, OPCODES[multi? ? :addq : :add], key.bytesize, 8, 0, 0, value.bytesize + key.bytesize + 8, 0, 0, flags, ttl, key, value].pack(FORMAT[:add])
+ write(req)
+ cas_response unless multi?
+ end
+ end
+
+ def replace(key, value, ttl, cas, options)
+ (value, flags) = serialize(key, value, options)
+ ttl = sanitize_ttl(ttl)
+
+ guard_max_value(key, value) do
+ req = [REQUEST, OPCODES[multi? ? :replaceq : :replace], key.bytesize, 8, 0, 0, value.bytesize + key.bytesize + 8, 0, cas, flags, ttl, key, value].pack(FORMAT[:replace])
+ write(req)
+ cas_response unless multi?
+ end
+ end
+
+ def delete(key, cas)
+ req = [REQUEST, OPCODES[multi? ? :deleteq : :delete], key.bytesize, 0, 0, 0, key.bytesize, 0, cas, key].pack(FORMAT[:delete])
+ write(req)
+ generic_response unless multi?
+ end
+
+ def flush(ttl)
+ req = [REQUEST, OPCODES[:flush], 0, 4, 0, 0, 4, 0, 0, 0].pack(FORMAT[:flush])
+ write(req)
+ generic_response
+ end
+
+ def decr_incr(opcode, key, count, ttl, default)
+ expiry = default ? sanitize_ttl(ttl) : 0xFFFFFFFF
+ default ||= 0
+ (h, l) = split(count)
+ (dh, dl) = split(default)
+ req = [REQUEST, OPCODES[opcode], key.bytesize, 20, 0, 0, key.bytesize + 20, 0, 0, h, l, dh, dl, expiry, key].pack(FORMAT[opcode])
+ write(req)
+ body = generic_response
+ body ? body.unpack('Q>').first : body
+ end
+
+ def decr(key, count, ttl, default)
+ decr_incr :decr, key, count, ttl, default
+ end
+
+ def incr(key, count, ttl, default)
+ decr_incr :incr, key, count, ttl, default
+ end
+
+ def write_append_prepend(opcode, key, value)
+ write_generic [REQUEST, OPCODES[opcode], key.bytesize, 0, 0, 0, value.bytesize + key.bytesize, 0, 0, key, value].pack(FORMAT[opcode])
+ end
+
+ def write_generic(bytes)
+ write(bytes)
+ generic_response
+ end
+
+ def write_noop
+ req = [REQUEST, OPCODES[:noop], 0, 0, 0, 0, 0, 0, 0].pack(FORMAT[:noop])
+ write(req)
+ end
+
+ # Noop is a keepalive operation but also used to demarcate the end of a set of pipelined commands.
+ # We need to read all the responses at once.
+ def noop
+ write_noop
+ multi_response
+ end
+
+ def append(key, value)
+ write_append_prepend :append, key, value
+ end
+
+ def prepend(key, value)
+ write_append_prepend :prepend, key, value
+ end
+
+ def stats(info='')
+ req = [REQUEST, OPCODES[:stat], info.bytesize, 0, 0, 0, info.bytesize, 0, 0, info].pack(FORMAT[:stat])
+ write(req)
+ keyvalue_response
+ end
+
+ def reset_stats
+ write_generic [REQUEST, OPCODES[:stat], 'reset'.bytesize, 0, 0, 0, 'reset'.bytesize, 0, 0, 'reset'].pack(FORMAT[:stat])
+ end
+
+ def cas(key)
+ req = [REQUEST, OPCODES[:get], key.bytesize, 0, 0, 0, key.bytesize, 0, 0, key].pack(FORMAT[:get])
+ write(req)
+ data_cas_response
+ end
+
+ def version
+ write_generic [REQUEST, OPCODES[:version], 0, 0, 0, 0, 0, 0, 0].pack(FORMAT[:noop])
+ end
+
+ def touch(key, ttl)
+ ttl = sanitize_ttl(ttl)
+ write_generic [REQUEST, OPCODES[:touch], key.bytesize, 4, 0, 0, key.bytesize + 4, 0, 0, ttl, key].pack(FORMAT[:touch])
+ end
+
+ # http://www.hjp.at/zettel/m/memcached_flags.rxml
+ # Looks like most clients use bit 0 to indicate native language serialization
+ # and bit 1 to indicate gzip compression.
+ FLAG_SERIALIZED = 0x1
+ FLAG_COMPRESSED = 0x2
+
+ def serialize(key, value, options=nil)
+ marshalled = false
+ value = unless options && options[:raw]
+ marshalled = true
+ begin
+ self.serializer.dump(value)
+ rescue => ex
+ # Marshalling can throw several different types of generic Ruby exceptions.
+ # Convert to a specific exception so we can special case it higher up the stack.
+ exc = Dalli::MarshalError.new(ex.message)
+ exc.set_backtrace ex.backtrace
+ raise exc
+ end
+ else
+ value.to_s
+ end
+ compressed = false
+ if @options[:compress] && value.bytesize >= @options[:compression_min_size] &&
+ (!@options[:compression_max_size] || value.bytesize <= @options[:compression_max_size])
+ value = self.compressor.compress(value)
+ compressed = true
+ end
+
+ flags = 0
+ flags |= FLAG_COMPRESSED if compressed
+ flags |= FLAG_SERIALIZED if marshalled
+ [value, flags]
+ end
+
+ def deserialize(value, flags)
+ value = self.compressor.decompress(value) if (flags & FLAG_COMPRESSED) != 0
+ value = self.serializer.load(value) if (flags & FLAG_SERIALIZED) != 0
+ value
+ rescue TypeError
+ raise if $!.message !~ /needs to have method `_load'|exception class\/object expected|instance of IO needed|incompatible marshal file format/
+ raise UnmarshalError, "Unable to unmarshal value: #{$!.message}"
+ rescue ArgumentError
+ raise if $!.message !~ /undefined class|marshal data too short/
+ raise UnmarshalError, "Unable to unmarshal value: #{$!.message}"
+ rescue Zlib::Error
+ raise UnmarshalError, "Unable to uncompress value: #{$!.message}"
+ end
+
+ def data_cas_response
+ (extras, _, status, count, _, cas) = read_header.unpack(CAS_HEADER)
+ data = read(count) if count > 0
+ if status == 1
+ nil
+ elsif status != 0
+ raise Dalli::DalliError, "Response error #{status}: #{RESPONSE_CODES[status]}"
+ elsif data
+ flags = data[0...extras].unpack('N')[0]
+ value = data[extras..-1]
+ data = deserialize(value, flags)
+ end
+ [data, cas]
+ end
+
+ CAS_HEADER = '@4CCnNNQ'
+ NORMAL_HEADER = '@4CCnN'
+ KV_HEADER = '@2n@6nN@16Q'
+
+ def guard_max_value(key, value)
+ if value.bytesize <= @options[:value_max_bytes]
+ yield
+ else
+ Dalli.logger.warn "Value for #{key} over max size: #{@options[:value_max_bytes]} <= #{value.bytesize}"
+ false
+ end
+ end
+
+ # https://code.google.com/p/memcached/wiki/NewCommands#Standard_Protocol
+ # > An expiration time, in seconds. Can be up to 30 days. After 30 days, is treated as a unix timestamp of an exact date.
+ MAX_ACCEPTABLE_EXPIRATION_INTERVAL = 30*24*60*60 # 30 days
+ def sanitize_ttl(ttl)
+ if ttl > MAX_ACCEPTABLE_EXPIRATION_INTERVAL
+ Dalli.logger.debug "Expiration interval too long for Memcached, converting to an expiration timestamp"
+ Time.now.to_i + ttl
+ else
+ ttl
+ end
+ end
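+
+ # For example (sketch): sanitize_ttl(3600) is passed through unchanged, while
+ # sanitize_ttl(60 * 24 * 60 * 60), i.e. 60 days, becomes an absolute unix
+ # timestamp, since memcached treats intervals above 30 days as timestamps.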
+
+ def generic_response(unpack=false)
+ (extras, _, status, count) = read_header.unpack(NORMAL_HEADER)
+ data = read(count) if count > 0
+ if status == 1
+ nil
+ elsif status == 2 || status == 5
+ false # Not stored, normal status for add operation
+ elsif status != 0
+ raise Dalli::DalliError, "Response error #{status}: #{RESPONSE_CODES[status]}"
+ elsif data
+ flags = data[0...extras].unpack('N')[0]
+ value = data[extras..-1]
+ unpack ? deserialize(value, flags) : value
+ else
+ true
+ end
+ end
+
+ def cas_response
+ (_, _, status, count, _, cas) = read_header.unpack(CAS_HEADER)
+ read(count) if count > 0 # this is potential data that we don't care about
+ if status == 1
+ nil
+ elsif status == 2 || status == 5
+ false # Not stored, normal status for add operation
+ elsif status != 0
+ raise Dalli::DalliError, "Response error #{status}: #{RESPONSE_CODES[status]}"
+ else
+ cas
+ end
+ end
+
+ def keyvalue_response
+ hash = {}
+ loop do
+ (key_length, _, body_length, _) = read_header.unpack(KV_HEADER)
+ return hash if key_length == 0
+ key = read(key_length)
+ value = read(body_length - key_length) if body_length - key_length > 0
+ hash[key] = value
+ end
+ end
+
+ def multi_response
+ hash = {}
+ loop do
+ (key_length, _, body_length, _) = read_header.unpack(KV_HEADER)
+ return hash if key_length == 0
+ flags = read(4).unpack('N')[0]
+ key = read(key_length)
+ value = read(body_length - key_length - 4) if body_length - key_length - 4 > 0
+ hash[key] = deserialize(value, flags)
+ end
+ end
+
+ def write(bytes)
+ begin
+ @inprogress = true
+ result = @sock.write(bytes)
+ @inprogress = false
+ result
+ rescue SystemCallError, Timeout::Error => e
+ failure!(e)
+ end
+ end
+
+ def read(count)
+ begin
+ @inprogress = true
+ data = @sock.readfull(count)
+ @inprogress = false
+ data
+ rescue SystemCallError, Timeout::Error, EOFError => e
+ failure!(e)
+ end
+ end
+
+ def read_header
+ read(24) || raise(Dalli::NetworkError, 'No response')
+ end
+
+ def connect
+ Dalli.logger.debug { "Dalli::Server#connect #{hostname}:#{port}" }
+
+ begin
+ @pid = Process.pid
+ @sock = KSocket.open(hostname, port, self, options)
+ @version = version # trigger actual connect
+ sasl_authentication if need_auth?
+ up!
+ rescue Dalli::DalliError # SASL auth failure
+ raise
+ rescue SystemCallError, Timeout::Error, EOFError, SocketError => e
+ # SocketError = DNS resolution failure
+ failure!(e)
+ end
+ end
+
+ def split(n)
+ [n >> 32, 0xFFFFFFFF & n]
+ end
+
+ REQUEST = 0x80
+ RESPONSE = 0x81
+
+ RESPONSE_CODES = {
+ 0 => 'No error',
+ 1 => 'Key not found',
+ 2 => 'Key exists',
+ 3 => 'Value too large',
+ 4 => 'Invalid arguments',
+ 5 => 'Item not stored',
+ 6 => 'Incr/decr on a non-numeric value',
+ 0x20 => 'Authentication required',
+ 0x81 => 'Unknown command',
+ 0x82 => 'Out of memory',
+ }
+
+ OPCODES = {
+ :get => 0x00,
+ :set => 0x01,
+ :add => 0x02,
+ :replace => 0x03,
+ :delete => 0x04,
+ :incr => 0x05,
+ :decr => 0x06,
+ :flush => 0x08,
+ :noop => 0x0A,
+ :version => 0x0B,
+ :getkq => 0x0D,
+ :append => 0x0E,
+ :prepend => 0x0F,
+ :stat => 0x10,
+ :setq => 0x11,
+ :addq => 0x12,
+ :replaceq => 0x13,
+ :deleteq => 0x14,
+ :incrq => 0x15,
+ :decrq => 0x16,
+ :auth_negotiation => 0x20,
+ :auth_request => 0x21,
+ :auth_continue => 0x22,
+ :touch => 0x1C,
+ }
+
+ HEADER = "CCnCCnNNQ"
+ OP_FORMAT = {
+ :get => 'a*',
+ :set => 'NNa*a*',
+ :add => 'NNa*a*',
+ :replace => 'NNa*a*',
+ :delete => 'a*',
+ :incr => 'NNNNNa*',
+ :decr => 'NNNNNa*',
+ :flush => 'N',
+ :noop => '',
+ :getkq => 'a*',
+ :version => '',
+ :stat => 'a*',
+ :append => 'a*a*',
+ :prepend => 'a*a*',
+ :auth_request => 'a*a*',
+ :auth_continue => 'a*a*',
+ :touch => 'Na*',
+ }
+ FORMAT = OP_FORMAT.inject({}) { |memo, (k, v)| memo[k] = HEADER + v; memo }
+
+
+ #######
+ # SASL authentication support for NorthScale
+ #######
+
+ def need_auth?
+ @options[:username] || ENV['MEMCACHE_USERNAME']
+ end
+
+ def username
+ @options[:username] || ENV['MEMCACHE_USERNAME']
+ end
+
+ def password
+ @options[:password] || ENV['MEMCACHE_PASSWORD']
+ end
+
+ def sasl_authentication
+ Dalli.logger.info { "Dalli/SASL authenticating as #{username}" }
+
+ # negotiate
+ req = [REQUEST, OPCODES[:auth_negotiation], 0, 0, 0, 0, 0, 0, 0].pack(FORMAT[:noop])
+ write(req)
+
+ (extras, type, status, count) = read_header.unpack(NORMAL_HEADER)
+ raise Dalli::NetworkError, "Unexpected message format: #{extras} #{count}" unless extras == 0 && count > 0
+ content = read(count)
+ return (Dalli.logger.debug("Authentication not required/supported by server")) if status == 0x81
+ mechanisms = content.split(' ')
+ raise NotImplementedError, "Dalli only supports the PLAIN authentication mechanism" if !mechanisms.include?('PLAIN')
+
+ # request
+ mechanism = 'PLAIN'
+ msg = "\x0#{username}\x0#{password}"
+ req = [REQUEST, OPCODES[:auth_request], mechanism.bytesize, 0, 0, 0, mechanism.bytesize + msg.bytesize, 0, 0, mechanism, msg].pack(FORMAT[:auth_request])
+ write(req)
+
+ (extras, type, status, count) = read_header.unpack(NORMAL_HEADER)
+ raise Dalli::NetworkError, "Unexpected message format: #{extras} #{count}" unless extras == 0 && count > 0
+ content = read(count)
+ return Dalli.logger.info("Dalli/SASL: #{content}") if status == 0
+
+ raise Dalli::DalliError, "Error authenticating: #{status}" unless status == 0x21
+ raise NotImplementedError, "No two-step authentication mechanisms supported"
+ # (step, msg) = sasl.receive('challenge', content)
+ # raise Dalli::NetworkError, "Authentication failed" if sasl.failed? || step != 'response'
+ end
+
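+ # Accepted forms (port and weight are optional); an illustrative sketch:
+ #
+ #   parse_hostname('cache.example.com')          # => ['cache.example.com', nil, nil]
+ #   parse_hostname('cache.example.com:11211:5')  # => ['cache.example.com', '11211', '5']
+ #   parse_hostname('[::1]:11211')                # => ['::1', '11211', nil]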
+ def parse_hostname(str)
+ res = str.match(/\A(\[([\h:]+)\]|[^:]+)(:(\d+))?(:(\d+))?\z/)
+ return res[2] || res[1], res[4], res[6]
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/socket.rb b/lib/dalli-2.7.2/lib/dalli/socket.rb
new file mode 100644
index 000000000..54b321a4d
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/socket.rb
@@ -0,0 +1,108 @@
+begin
+ require 'kgio'
+ puts "Using kgio socket IO" if defined?($TESTING) && $TESTING
+
+ class Dalli::Server::KSocket < Kgio::Socket
+ attr_accessor :options, :server
+
+ def kgio_wait_readable
+ IO.select([self], nil, nil, options[:socket_timeout]) || raise(Timeout::Error, "IO timeout")
+ end
+
+ def kgio_wait_writable
+ IO.select(nil, [self], nil, options[:socket_timeout]) || raise(Timeout::Error, "IO timeout")
+ end
+
+ def self.open(host, port, server, options = {})
+ addr = Socket.pack_sockaddr_in(port, host)
+ sock = start(addr)
+ sock.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, true)
+ sock.setsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE, true) if options[:keepalive]
+ sock.options = options
+ sock.server = server
+ sock.kgio_wait_writable
+ sock
+ end
+
+ alias :write :kgio_write
+
+ def readfull(count)
+ value = ''
+ loop do
+ value << kgio_read!(count - value.bytesize)
+ break if value.bytesize == count
+ end
+ value
+ end
+
+ def read_available
+ value = ''
+ loop do
+ ret = kgio_tryread(8196)
+ case ret
+ when nil
+ raise EOFError, 'end of stream'
+ when :wait_readable
+ break
+ else
+ value << ret
+ end
+ end
+ value
+ end
+
+ end
+
+ if ::Kgio.respond_to?(:wait_readable=)
+ ::Kgio.wait_readable = :kgio_wait_readable
+ ::Kgio.wait_writable = :kgio_wait_writable
+ end
+
+rescue LoadError
+
+ puts "Using standard socket IO (#{RUBY_DESCRIPTION})" if defined?($TESTING) && $TESTING
+ class Dalli::Server::KSocket < TCPSocket
+ attr_accessor :options, :server
+
+ def self.open(host, port, server, options = {})
+ Timeout.timeout(options[:socket_timeout]) do
+ sock = new(host, port)
+ sock.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, true)
+ sock.setsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE, true) if options[:keepalive]
+ sock.options = { :host => host, :port => port }.merge(options)
+ sock.server = server
+ sock
+ end
+ end
+
+ def readfull(count)
+ value = ''
+ begin
+ loop do
+ value << read_nonblock(count - value.bytesize)
+ break if value.bytesize == count
+ end
+ rescue Errno::EAGAIN, Errno::EWOULDBLOCK
+ if IO.select([self], nil, nil, options[:socket_timeout])
+ retry
+ else
+ raise Timeout::Error, "IO timeout: #{options.inspect}"
+ end
+ end
+ value
+ end
+
+ def read_available
+ value = ''
+ loop do
+ begin
+ value << read_nonblock(8196)
+ rescue Errno::EAGAIN, Errno::EWOULDBLOCK
+ break
+ end
+ end
+ value
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/lib/dalli/version.rb b/lib/dalli-2.7.2/lib/dalli/version.rb
new file mode 100644
index 000000000..5b5dfddaf
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/dalli/version.rb
@@ -0,0 +1,3 @@
+module Dalli
+ VERSION = '2.7.2'
+end
diff --git a/lib/dalli-2.7.2/lib/rack/session/dalli.rb b/lib/dalli-2.7.2/lib/rack/session/dalli.rb
new file mode 100644
index 000000000..70940cb09
--- /dev/null
+++ b/lib/dalli-2.7.2/lib/rack/session/dalli.rb
@@ -0,0 +1,75 @@
+require 'rack/session/abstract/id'
+require 'dalli'
+
+module Rack
+ module Session
+ class Dalli < Abstract::ID
+ attr_reader :pool, :mutex
+
+ DEFAULT_OPTIONS = Abstract::ID::DEFAULT_OPTIONS.merge \
+ :namespace => 'rack:session',
+ :memcache_server => 'localhost:11211'
+
+ def initialize(app, options={})
+ super
+ @mutex = Mutex.new
+ mserv = @default_options[:memcache_server]
+ mopts = @default_options.reject{|k,v| !DEFAULT_OPTIONS.include? k }
+ @pool = options[:cache] || ::Dalli::Client.new(mserv, mopts)
+ @pool.alive!
+ end
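+
+ # Typical middleware wiring (sketch; the server address is an example):
+ #
+ #   use Rack::Session::Dalli, :memcache_server => 'localhost:11211',
+ #                             :expire_after => 1800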
+
+ def generate_sid
+ loop do
+ sid = super
+ break sid unless @pool.get(sid)
+ end
+ end
+
+ def get_session(env, sid)
+ with_lock(env, [nil, {}]) do
+ unless sid and !sid.empty? and session = @pool.get(sid)
+ sid, session = generate_sid, {}
+ unless @pool.add(sid, session)
+ raise "Session collision on '#{sid.inspect}'"
+ end
+ end
+ [sid, session]
+ end
+ end
+
+ def set_session(env, session_id, new_session, options)
+ return false unless session_id
+ expiry = options[:expire_after]
+ expiry = expiry.nil? ? 0 : expiry + 1
+
+ with_lock(env, false) do
+ @pool.set session_id, new_session, expiry
+ session_id
+ end
+ end
+
+ def destroy_session(env, session_id, options)
+ with_lock(env) do
+ @pool.delete(session_id)
+ generate_sid unless options[:drop]
+ end
+ end
+
+ def with_lock(env, default=nil)
+ @mutex.lock if env['rack.multithread']
+ yield
+ rescue ::Dalli::DalliError, Errno::ECONNREFUSED
+ raise if $!.message =~ /undefined class/
+ if $VERBOSE
+ warn "#{self} is unable to find memcached server."
+ warn $!.inspect
+ end
+ default
+ ensure
+ @mutex.unlock if @mutex.locked?
+ end
+
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/test/benchmark_test.rb b/lib/dalli-2.7.2/test/benchmark_test.rb
new file mode 100644
index 000000000..783a8b475
--- /dev/null
+++ b/lib/dalli-2.7.2/test/benchmark_test.rb
@@ -0,0 +1,242 @@
+require 'helper'
+require 'benchmark'
+require 'active_support/cache/dalli_store'
+
+describe 'performance' do
+ before do
+ puts "Testing #{Dalli::VERSION} with #{RUBY_DESCRIPTION}"
+ # We'll use a simple @value to try to avoid spending time in Marshal,
+ # which is a constant penalty that both clients have to pay
+ @value = []
+ @marshalled = Marshal.dump(@value)
+
+ @servers = ['127.0.0.1:19122', 'localhost:19122']
+ @key1 = "Short"
+ @key2 = "Sym1-2-3::45"*8
+ @key3 = "Long"*40
+ @key4 = "Medium"*8
+ # 5 and 6 are only used for multiget miss test
+ @key5 = "Medium2"*8
+ @key6 = "Long3"*40
+ @counter = 'counter'
+ end
+
+ it 'runs benchmarks' do
+ memcached do
+
+ Benchmark.bm(37) do |x|
+
+ n = 2500
+
+ @ds = ActiveSupport::Cache::DalliStore.new(@servers)
+ x.report("mixed:rails:dalli") do
+ n.times do
+ @ds.read @key1
+ @ds.write @key2, @value
+ @ds.fetch(@key3) { @value }
+ @ds.fetch(@key2) { @value }
+ @ds.fetch(@key1) { @value }
+ @ds.write @key2, @value, :unless_exist => true
+ @ds.delete @key2
+ @ds.increment @counter, 1, :initial => 100
+ @ds.increment @counter, 1, :expires_in => 12
+ @ds.decrement @counter, 1
+ end
+ end
+
+ x.report("mixed:rails-localcache:dalli") do
+ n.times do
+ @ds.with_local_cache do
+ @ds.read @key1
+ @ds.write @key2, @value
+ @ds.fetch(@key3) { @value }
+ @ds.fetch(@key2) { @value }
+ @ds.fetch(@key1) { @value }
+ @ds.write @key2, @value, :unless_exist => true
+ @ds.delete @key2
+ @ds.increment @counter, 1, :initial => 100
+ @ds.increment @counter, 1, :expires_in => 12
+ @ds.decrement @counter, 1
+ end
+ end
+ end
+
+ @ds.clear
+ sizeable_data = "<marquee>some view partial data</marquee>" * 50
+ [@key1, @key2, @key3, @key4, @key5, @key6].each do |key|
+ @ds.write(key, sizeable_data)
+ end
+
+ x.report("read_multi_big:rails:dalli") do
+ n.times do
+ @ds.read_multi @key1, @key2, @key3, @key4
+ @ds.read @key1
+ @ds.read @key2
+ @ds.read @key3
+ @ds.read @key4
+ @ds.read @key1
+ @ds.read @key2
+ @ds.read @key3
+ @ds.read_multi @key1, @key2, @key3
+ end
+ end
+
+ x.report("read_multi_big:rails-localcache:dalli") do
+ n.times do
+ @ds.with_local_cache do
+ @ds.read_multi @key1, @key2, @key3, @key4
+ @ds.read @key1
+ @ds.read @key2
+ @ds.read @key3
+ @ds.read @key4
+ end
+ @ds.with_local_cache do
+ @ds.read @key1
+ @ds.read @key2
+ @ds.read @key3
+ @ds.read_multi @key1, @key2, @key3
+ end
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("set:plain:dalli") do
+ n.times do
+ @m.set @key1, @marshalled, 0, :raw => true
+ @m.set @key2, @marshalled, 0, :raw => true
+ @m.set @key3, @marshalled, 0, :raw => true
+ @m.set @key1, @marshalled, 0, :raw => true
+ @m.set @key2, @marshalled, 0, :raw => true
+ @m.set @key3, @marshalled, 0, :raw => true
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("setq:plain:dalli") do
+ @m.multi do
+ n.times do
+ @m.set @key1, @marshalled, 0, :raw => true
+ @m.set @key2, @marshalled, 0, :raw => true
+ @m.set @key3, @marshalled, 0, :raw => true
+ @m.set @key1, @marshalled, 0, :raw => true
+ @m.set @key2, @marshalled, 0, :raw => true
+ @m.set @key3, @marshalled, 0, :raw => true
+ end
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("set:ruby:dalli") do
+ n.times do
+ @m.set @key1, @value
+ @m.set @key2, @value
+ @m.set @key3, @value
+ @m.set @key1, @value
+ @m.set @key2, @value
+ @m.set @key3, @value
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("get:plain:dalli") do
+ n.times do
+ @m.get @key1, :raw => true
+ @m.get @key2, :raw => true
+ @m.get @key3, :raw => true
+ @m.get @key1, :raw => true
+ @m.get @key2, :raw => true
+ @m.get @key3, :raw => true
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("get:ruby:dalli") do
+ n.times do
+ @m.get @key1
+ @m.get @key2
+ @m.get @key3
+ @m.get @key1
+ @m.get @key2
+ @m.get @key3
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("multiget:ruby:dalli") do
+ n.times do
+ # We don't use the keys array because splat is slow
+ @m.get_multi @key1, @key2, @key3, @key4, @key5, @key6
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("missing:ruby:dalli") do
+ n.times do
+ begin @m.delete @key1; rescue; end
+ begin @m.get @key1; rescue; end
+ begin @m.delete @key2; rescue; end
+ begin @m.get @key2; rescue; end
+ begin @m.delete @key3; rescue; end
+ begin @m.get @key3; rescue; end
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("mixed:ruby:dalli") do
+ n.times do
+ @m.set @key1, @value
+ @m.set @key2, @value
+ @m.set @key3, @value
+ @m.get @key1
+ @m.get @key2
+ @m.get @key3
+ @m.set @key1, @value
+ @m.get @key1
+ @m.set @key2, @value
+ @m.get @key2
+ @m.set @key3, @value
+ @m.get @key3
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("mixedq:ruby:dalli") do
+ @m.multi do
+ n.times do
+ @m.set @key1, @value
+ @m.set @key2, @value
+ @m.set @key3, @value
+ @m.get @key1
+ @m.get @key2
+ @m.get @key3
+ @m.set @key1, @value
+ @m.get @key1
+ @m.set @key2, @value
+ @m.replace @key2, @value
+ @m.delete @key3
+ @m.add @key3, @value
+ @m.get @key2
+ @m.set @key3, @value
+ @m.get @key3
+ end
+ end
+ end
+
+ @m = Dalli::Client.new(@servers)
+ x.report("incr:ruby:dalli") do
+ counter = 'foocount'
+ n.times do
+ @m.incr counter, 1, 0, 1
+ end
+ n.times do
+ @m.decr counter, 1
+ end
+
+ assert_equal 0, @m.incr(counter, 0)
+ end
+
+ end
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/test/helper.rb b/lib/dalli-2.7.2/test/helper.rb
new file mode 100644
index 000000000..9f72ec5e9
--- /dev/null
+++ b/lib/dalli-2.7.2/test/helper.rb
@@ -0,0 +1,55 @@
+$TESTING = true
+require 'rubygems'
+# require 'simplecov'
+# SimpleCov.start
+require 'minitest/pride'
+require 'minitest/autorun'
+require 'mocha/setup'
+require 'memcached_mock'
+
+ENV['MEMCACHED_SASL_PWDB'] = "#{File.dirname(__FILE__)}/sasldb"
+
+WANT_RAILS_VERSION = ENV['RAILS_VERSION'] || '>= 3.0.0'
+gem 'rails', WANT_RAILS_VERSION
+require 'rails'
+puts "Testing with Rails #{Rails.version}"
+
+require 'dalli'
+require 'logger'
+
+Dalli.logger = Logger.new(STDOUT)
+Dalli.logger.level = Logger::ERROR
+
+class MiniTest::Spec
+ include MemcachedMock::Helper
+
+ def assert_error(error, regexp=nil, &block)
+ ex = assert_raises(error, &block)
+ assert_match(regexp, ex.message, "#{ex.class.name}: #{ex.message}\n#{ex.backtrace.join("\n\t")}")
+ end
+
+ def op_cas_succeeds(rsp)
+ rsp.is_a?(Integer) && rsp > 0
+ end
+
+ def op_replace_succeeds(rsp)
+ rsp.is_a?(Integer) && rsp > 0
+ end
+
+ # add and set must have the same return value because of DalliStore#write_entry
+ def op_addset_succeeds(rsp)
+ rsp.is_a?(Integer) && rsp > 0
+ end
+
+ def with_activesupport
+ require 'active_support/all'
+ require 'active_support/cache/dalli_store'
+ yield
+ end
+
+ def with_actionpack
+ require 'action_dispatch'
+ require 'action_controller'
+ yield
+ end
+end
diff --git a/lib/dalli-2.7.2/test/memcached_mock.rb b/lib/dalli-2.7.2/test/memcached_mock.rb
new file mode 100644
index 000000000..7fdd8c1a8
--- /dev/null
+++ b/lib/dalli-2.7.2/test/memcached_mock.rb
@@ -0,0 +1,121 @@
+require "socket"
+
+$started = {}
+
+module MemcachedMock
+ def self.start(port=19123, &block)
+ server = TCPServer.new("localhost", port)
+ session = server.accept
+ block.call session
+ end
+
+ def self.delayed_start(port=19123, wait=1, &block)
+ server = TCPServer.new("localhost", port)
+ sleep wait
+ block.call server
+ end
+
+ module Helper
+ # Forks the current process and starts a new mock Memcached server on
+ # port 22122.
+ #
+ # memcached_mock(lambda {|sock| sock.write('123') }) do
+ # assert_equal "PONG", Dalli::Client.new('localhost:22122').get('abc')
+ # end
+ #
+ def memcached_mock(proc, meth = :start)
+ return unless supports_fork?
+ begin
+ pid = fork do
+ trap("TERM") { exit }
+
+ MemcachedMock.send(meth) do |*args|
+ proc.call(*args)
+ end
+ end
+
+ sleep 0.3 # Give time for the socket to start listening.
+ yield
+ ensure
+ if pid
+ Process.kill("TERM", pid)
+ Process.wait(pid)
+ end
+ end
+ end
+
+ PATHS = %w(
+ /usr/local/bin/
+ /opt/local/bin/
+ /usr/bin/
+ )
+
+ def find_memcached
+ output = `memcached -h | head -1`.strip
+ if output && output =~ /^memcached (\d\.\d\.\d+)/ && $1 > '1.4'
+ return (puts "Found #{output} in PATH"; '')
+ end
+ PATHS.each do |path|
+ output = `#{path}memcached -h | head -1`.strip
+ if output && output =~ /^memcached (\d\.\d\.\d+)/ && $1 > '1.4'
+ return (puts "Found #{output} in #{path}"; path)
+ end
+ end
+
+ raise Errno::ENOENT, "Unable to find memcached 1.4+ locally"
+ end
+
+ def memcached(port=19122, args='', options={})
+ memcached_server(port, args)
+ yield Dalli::Client.new(["localhost:#{port}", "127.0.0.1:#{port}"], options)
+ end
+
+ def memcached_cas(port=19122, args='', options={})
+ memcached_server(port, args)
+ require 'dalli/cas/client'
+ yield Dalli::Client.new(["localhost:#{port}", "127.0.0.1:#{port}"], options)
+ end
+
+ def memcached_server(port=19122, args='')
+ Memcached.path ||= find_memcached
+
+ cmd = "#{Memcached.path}memcached #{args} -p #{port}"
+
+ $started[port] ||= begin
+ #puts "Starting: #{cmd}..."
+ pid = IO.popen(cmd).pid
+ at_exit do
+ begin
+ Process.kill("TERM", pid)
+ Process.wait(pid)
+ rescue Errno::ECHILD, Errno::ESRCH
+ end
+ end
+ sleep 0.1
+ pid
+ end
+ end
+
+ def supports_fork?
+ !defined?(RUBY_ENGINE) || RUBY_ENGINE != 'jruby'
+ end
+
+ def memcached_kill(port)
+ pid = $started.delete(port)
+ if pid
+ begin
+ Process.kill("TERM", pid)
+ Process.wait(pid)
+ rescue Errno::ECHILD, Errno::ESRCH
+ end
+ end
+ end
+
+ end
+end
+
+module Memcached
+ class << self
+ attr_accessor :path
+ end
+end
diff --git a/lib/dalli-2.7.2/test/sasldb b/lib/dalli-2.7.2/test/sasldb
new file mode 100644
index 000000000..26cb137aa
--- /dev/null
+++ b/lib/dalli-2.7.2/test/sasldb
@@ -0,0 +1 @@
+testuser:testtest:::::::
diff --git a/lib/dalli-2.7.2/test/test_active_support.rb b/lib/dalli-2.7.2/test/test_active_support.rb
new file mode 100644
index 000000000..4af18163f
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_active_support.rb
@@ -0,0 +1,439 @@
+# encoding: utf-8
+require 'helper'
+require 'connection_pool'
+
+class MockUser
+ def cache_key
+ "users/1/21348793847982314"
+ end
+end
+
+describe 'ActiveSupport' do
+ describe 'active_support caching' do
+
+ it 'has accessible options' do
+ @dalli = ActiveSupport::Cache.lookup_store(:dalli_store, 'localhost:19122', :expires_in => 5.minutes, :frob => 'baz')
+ assert_equal 'baz', @dalli.options[:frob]
+ end
+
+ it 'allow mute and silence' do
+ @dalli = ActiveSupport::Cache.lookup_store(:dalli_store, 'localhost:19122')
+ @dalli.mute do
+ assert op_addset_succeeds(@dalli.write('foo', 'bar', nil))
+ assert_equal 'bar', @dalli.read('foo', nil)
+ end
+ refute @dalli.silence?
+ @dalli.silence!
+ assert_equal true, @dalli.silence?
+ end
+
+ it 'handle nil options' do
+ @dalli = ActiveSupport::Cache.lookup_store(:dalli_store, 'localhost:19122')
+ assert op_addset_succeeds(@dalli.write('foo', 'bar', nil))
+ assert_equal 'bar', @dalli.read('foo', nil)
+ assert_equal 18, @dalli.fetch('lkjsadlfk', nil) { 18 }
+ assert_equal 18, @dalli.fetch('lkjsadlfk', nil) { 18 }
+ assert_equal 1, @dalli.increment('lkjsa', 1, nil)
+ assert_equal 2, @dalli.increment('lkjsa', 1, nil)
+ assert_equal 1, @dalli.decrement('lkjsa', 1, nil)
+ assert_equal true, @dalli.delete('lkjsa')
+ end
+
+ it 'support fetch' do
+ with_activesupport do
+ memcached do
+ connect
+ dvalue = @dalli.fetch('someotherkeywithoutspaces', :expires_in => 1.second) { 123 }
+ assert_equal 123, dvalue
+
+ o = Object.new
+ o.instance_variable_set :@foo, 'bar'
+ dvalue = @dalli.fetch(rand_key, :raw => true) { o }
+ assert_equal o, dvalue
+
+ dvalue = @dalli.fetch(rand_key) { o }
+ assert_equal o, dvalue
+
+ @dalli.write('false', false)
+ dvalue = @dalli.fetch('false') { flunk }
+ assert_equal false, dvalue
+
+ user = MockUser.new
+ @dalli.write(user.cache_key, false)
+ dvalue = @dalli.fetch(user) { flunk }
+ assert_equal false, dvalue
+ end
+ end
+ end
+
+ it 'support keys with spaces on Rails3' do
+ with_activesupport do
+ memcached do
+ connect
+ dvalue = @dalli.fetch('some key with spaces', :expires_in => 1.second) { 123 }
+ assert_equal 123, dvalue
+ end
+ end
+ end
+
+ it 'support read_multi' do
+ with_activesupport do
+ memcached do
+ connect
+ x = rand_key
+ y = rand_key
+ assert_equal({}, @dalli.read_multi(x, y))
+ @dalli.write(x, '123')
+ @dalli.write(y, 123)
+ assert_equal({ x => '123', y => 123 }, @dalli.read_multi(x, y))
+ end
+ end
+ end
+
+ it 'support read_multi with an array' do
+ with_activesupport do
+ memcached do
+ connect
+ x = rand_key
+ y = rand_key
+ assert_equal({}, @dalli.read_multi([x, y]))
+ @dalli.write(x, '123')
+ @dalli.write(y, 123)
+ assert_equal({}, @dalli.read_multi([x, y]))
+ @dalli.write([x, y], '123')
+ assert_equal({ [x, y] => '123' }, @dalli.read_multi([x, y]))
+ end
+ end
+ end
+
+ it 'support raw read_multi' do
+ with_activesupport do
+ memcached do
+ connect
+ @dalli.write("abc", 5, :raw => true)
+ @dalli.write("cba", 5, :raw => true)
+ assert_equal({'abc' => '5', 'cba' => '5' }, @dalli.read_multi("abc", "cba"))
+ end
+ end
+ end
+
+ it 'support read_multi with LocalCache' do
+ with_activesupport do
+ memcached do
+ connect
+ x = rand_key
+ y = rand_key
+ assert_equal({}, @dalli.read_multi(x, y))
+ @dalli.write(x, '123')
+ @dalli.write(y, 456)
+
+ @dalli.with_local_cache do
+ assert_equal({ x => '123', y => 456 }, @dalli.read_multi(x, y))
+ Dalli::Client.any_instance.expects(:get).with(any_parameters).never
+
+ dres = @dalli.read(x)
+ assert_equal dres, '123'
+ end
+
+ Dalli::Client.any_instance.unstub(:get)
+
+ # Fresh LocalStore
+ @dalli.with_local_cache do
+ @dalli.read(x)
+ Dalli::Client.any_instance.expects(:get_multi).with([y.to_s]).returns(y.to_s => 456)
+
+ assert_equal({ x => '123', y => 456}, @dalli.read_multi(x, y))
+ end
+ end
+ end
+ end
+
+ it 'supports fetch_multi' do
+ with_activesupport do
+ memcached do
+ connect
+
+ x = rand_key.to_s
+ y = rand_key
+ hash = { x => 'ABC', y => 'DEF' }
+
+ @dalli.write(y, '123')
+
+ results = @dalli.fetch_multi(x, y) { |key| hash[key] }
+
+ assert_equal({ x => 'ABC', y => '123' }, results)
+ assert_equal('ABC', @dalli.read(x))
+ assert_equal('123', @dalli.read(y))
+ end
+ end
+ end
+
+ it 'support read, write and delete' do
+ with_activesupport do
+ memcached do
+ connect
+ y = rand_key
+ assert_nil @dalli.read(y)
+ dres = @dalli.write(y, 123)
+ assert op_addset_succeeds(dres)
+
+ dres = @dalli.read(y)
+ assert_equal 123, dres
+
+ dres = @dalli.delete(y)
+ assert_equal true, dres
+
+ user = MockUser.new
+ dres = @dalli.write(user.cache_key, "foo")
+ assert op_addset_succeeds(dres)
+
+ dres = @dalli.read(user)
+ assert_equal "foo", dres
+
+ dres = @dalli.delete(user)
+ assert_equal true, dres
+
+ bigkey = '123456789012345678901234567890'
+ @dalli.write(bigkey, 'double width')
+ assert_equal 'double width', @dalli.read(bigkey)
+ assert_equal({bigkey => "double width"}, @dalli.read_multi(bigkey))
+ end
+ end
+ end
+
+ it 'support read, write and delete with LocalCache' do
+ with_activesupport do
+ memcached do
+ connect
+ y = rand_key.to_s
+ @dalli.with_local_cache do
+ Dalli::Client.any_instance.expects(:get).with(y, {}).once.returns(123)
+ dres = @dalli.read(y)
+ assert_equal 123, dres
+
+ Dalli::Client.any_instance.expects(:get).with(y, {}).never
+
+ dres = @dalli.read(y)
+ assert_equal 123, dres
+
+ @dalli.write(y, 456)
+ dres = @dalli.read(y)
+ assert_equal 456, dres
+
+ @dalli.delete(y)
+ Dalli::Client.any_instance.expects(:get).with(y, {}).once.returns(nil)
+ dres = @dalli.read(y)
+ assert_equal nil, dres
+ end
+ end
+ end
+ end
+
+ it 'support unless_exist with LocalCache' do
+ with_activesupport do
+ memcached do
+ connect
+ y = rand_key.to_s
+ @dalli.with_local_cache do
+ Dalli::Client.any_instance.expects(:add).with(y, 123, nil, {:unless_exist => true}).once.returns(true)
+ dres = @dalli.write(y, 123, :unless_exist => true)
+ assert_equal true, dres
+
+ Dalli::Client.any_instance.expects(:add).with(y, 321, nil, {:unless_exist => true}).once.returns(false)
+
+ dres = @dalli.write(y, 321, :unless_exist => true)
+ assert_equal false, dres
+
+ Dalli::Client.any_instance.expects(:get).with(y, {}).once.returns(123)
+
+ dres = @dalli.read(y)
+ assert_equal 123, dres
+ end
+ end
+ end
+ end
+
+ it 'support increment/decrement commands' do
+ with_activesupport do
+ memcached do
+ connect
+ assert op_addset_succeeds(@dalli.write('counter', 0, :raw => true))
+ assert_equal 1, @dalli.increment('counter')
+ assert_equal 2, @dalli.increment('counter')
+ assert_equal 1, @dalli.decrement('counter')
+ assert_equal "1", @dalli.read('counter', :raw => true)
+
+ assert_equal 1, @dalli.increment('counterX')
+ assert_equal 2, @dalli.increment('counterX')
+ assert_equal 2, @dalli.read('counterX', :raw => true).to_i
+
+ assert_equal 5, @dalli.increment('counterY1', 1, :initial => 5)
+ assert_equal 6, @dalli.increment('counterY1', 1, :initial => 5)
+ assert_equal 6, @dalli.read('counterY1', :raw => true).to_i
+
+ assert_equal nil, @dalli.increment('counterZ1', 1, :initial => nil)
+ assert_equal nil, @dalli.read('counterZ1')
+
+ assert_equal 5, @dalli.decrement('counterY2', 1, :initial => 5)
+ assert_equal 4, @dalli.decrement('counterY2', 1, :initial => 5)
+ assert_equal 4, @dalli.read('counterY2', :raw => true).to_i
+
+ assert_equal nil, @dalli.decrement('counterZ2', 1, :initial => nil)
+ assert_equal nil, @dalli.read('counterZ2')
+
+ user = MockUser.new
+ assert op_addset_succeeds(@dalli.write(user, 0, :raw => true))
+ assert_equal 1, @dalli.increment(user)
+ assert_equal 2, @dalli.increment(user)
+ assert_equal 1, @dalli.decrement(user)
+ assert_equal "1", @dalli.read(user, :raw => true)
+ end
+ end
+ end
+
+ it 'support exist command' do
+ with_activesupport do
+ memcached do
+ connect
+ @dalli.write(:foo, 'a')
+ @dalli.write(:false_value, false)
+
+ assert_equal true, @dalli.exist?(:foo)
+ assert_equal true, @dalli.exist?(:false_value)
+
+ assert_equal false, @dalli.exist?(:bar)
+
+ user = MockUser.new
+ @dalli.write(user, 'foo')
+ assert_equal true, @dalli.exist?(user)
+ end
+ end
+ end
+
+ it 'support other esoteric commands' do
+ with_activesupport do
+ memcached do
+ connect
+ ds = @dalli.stats
+ assert_equal 1, ds.keys.size
+ assert ds[ds.keys.first].keys.size > 0
+
+ @dalli.reset
+ end
+ end
+ end
+
+ it 'respect "raise_errors" option' do
+ with_activesupport do
+ memcached(29125) do
+ @dalli = ActiveSupport::Cache.lookup_store(:dalli_store, 'localhost:29125')
+ @dalli.write 'foo', 'bar'
+ assert_equal @dalli.read('foo'), 'bar'
+
+ memcached_kill(29125)
+
+ assert_equal @dalli.read('foo'), nil
+
+ @dalli = ActiveSupport::Cache.lookup_store(:dalli_store, 'localhost:29125', :raise_errors => true)
+
+ exception = [Dalli::RingError, { :message => "No server available" }]
+
+ assert_raises(*exception) { @dalli.read 'foo' }
+ assert_raises(*exception) { @dalli.read 'foo', :raw => true }
+ assert_raises(*exception) { @dalli.write 'foo', 'bar' }
+ assert_raises(*exception) { @dalli.exist? 'foo' }
+ assert_raises(*exception) { @dalli.increment 'foo' }
+ assert_raises(*exception) { @dalli.decrement 'foo' }
+ assert_raises(*exception) { @dalli.delete 'foo' }
+ assert_equal @dalli.read_multi('foo', 'bar'), {}
+ assert_raises(*exception) { @dalli.delete 'foo' }
+ assert_raises(*exception) { @dalli.fetch('foo') { 42 } }
+ end
+ end
+ end
+ end
+
+ it 'handle crazy characters from far-away lands' do
+ with_activesupport do
+ memcached do
+ connect
+ key = "fooƒ"
+ value = 'bafƒ'
+ assert op_addset_succeeds(@dalli.write(key, value))
+ assert_equal value, @dalli.read(key)
+ end
+ end
+ end
+
+ it 'normalize options as expected' do
+ with_activesupport do
+ memcached do
+ @dalli = ActiveSupport::Cache::DalliStore.new('localhost:19122', :expires_in => 1, :namespace => 'foo', :compress => true)
+ assert_equal 1, @dalli.instance_variable_get(:@data).instance_variable_get(:@options)[:expires_in]
+ assert_equal 'foo', @dalli.instance_variable_get(:@data).instance_variable_get(:@options)[:namespace]
+ assert_equal ["localhost:19122"], @dalli.instance_variable_get(:@data).instance_variable_get(:@servers)
+ end
+ end
+ end
+
+ it 'handles nil server with additional options' do
+ with_activesupport do
+ memcached do
+ @dalli = ActiveSupport::Cache::DalliStore.new(nil, :expires_in => 1, :namespace => 'foo', :compress => true)
+ assert_equal 1, @dalli.instance_variable_get(:@data).instance_variable_get(:@options)[:expires_in]
+ assert_equal 'foo', @dalli.instance_variable_get(:@data).instance_variable_get(:@options)[:namespace]
+ assert_equal ["127.0.0.1:11211"], @dalli.instance_variable_get(:@data).instance_variable_get(:@servers)
+ end
+ end
+ end
+
+ it 'supports connection pooling' do
+ with_activesupport do
+ memcached do
+ @dalli = ActiveSupport::Cache::DalliStore.new('localhost:19122', :expires_in => 1, :namespace => 'foo', :compress => true, :pool_size => 3)
+ assert_equal nil, @dalli.read('foo')
+ assert @dalli.write('foo', 1)
+ assert_equal 1, @dalli.fetch('foo') { raise 'boom' }
+ assert_equal true, @dalli.dalli.is_a?(ConnectionPool)
+ assert_equal 1, @dalli.increment('bar')
+ assert_equal 0, @dalli.decrement('bar')
+ assert_equal true, @dalli.delete('bar')
+ assert_equal [true], @dalli.clear
+ assert_equal 1, @dalli.stats.size
+ end
+ end
+ end
+
+ it 'allow keys to be frozen' do
+ with_activesupport do
+ memcached do
+ connect
+ key = "foo"
+ key.freeze
+ assert op_addset_succeeds(@dalli.write(key, "value"))
+ end
+ end
+ end
+
+ it 'allow keys from a hash' do
+ with_activesupport do
+ memcached do
+ connect
+ map = { "one" => "one", "two" => "two" }
+ map.each_pair do |k, v|
+ assert op_addset_succeeds(@dalli.write(k, v))
+ end
+ assert_equal map, @dalli.read_multi(*(map.keys))
+ end
+ end
+ end
+
+ def connect
+ @dalli = ActiveSupport::Cache.lookup_store(:dalli_store, 'localhost:19122', :expires_in => 10.seconds, :namespace => lambda{33.to_s(36)})
+ @dalli.clear
+ end
+
+ def rand_key
+ rand(1_000_000_000)
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_cas_client.rb b/lib/dalli-2.7.2/test/test_cas_client.rb
new file mode 100644
index 000000000..423e36022
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_cas_client.rb
@@ -0,0 +1,107 @@
+require 'helper'
+require 'memcached_mock'
+
+describe 'Dalli::Cas::Client' do
+ describe 'using a live server' do
+ it 'supports get with CAS' do
+ memcached_cas do |dc|
+ dc.flush
+
+ expected = { 'blah' => 'blerg!' }
+ get_block_called = false
+ stored_value = stored_cas = nil
+ # Validate call-with-block
+ dc.get_cas('gets_key') do |v, cas|
+ get_block_called = true
+ stored_value = v
+ stored_cas = cas
+ end
+ assert get_block_called
+ assert_nil stored_value
+
+ dc.set('gets_key', expected)
+
+ # Validate call-with-return-value
+ stored_value, stored_cas = dc.get_cas('gets_key')
+ assert_equal stored_value, expected
+ assert(stored_cas != 0)
+ end
+ end
+
+ it 'supports multi-get with CAS' do
+ memcached_cas do |dc|
+ dc.close
+ dc.flush
+
+ expected_hash = {'a' => 'foo', 'b' => 123}
+ expected_hash.each_pair do |k, v|
+ dc.set(k, v)
+ end
+
+ # Invocation without block
+ resp = dc.get_multi_cas(%w(a b c d e f))
+ resp.each_pair do |k, data|
+ value, cas = [data.first, data.second]
+ assert_equal expected_hash[k], value
+ assert(cas && cas != 0)
+ end
+
+ # Invocation with block
+ dc.get_multi_cas(%w(a b c d e f)) do |k, data|
+ value, cas = [data.first, data.second]
+ assert_equal expected_hash[k], value
+ assert(cas && cas != 0)
+ end
+ end
+ end
+
+ it 'supports replace-with-CAS operation' do
+ memcached_cas do |dc|
+ dc.flush
+ cas = dc.set('key', 'value')
+
+ # Accepts CAS, replaces, and returns new CAS
+ cas = dc.replace_cas('key', 'value2', cas)
+ assert cas.is_a?(Integer)
+
+ assert_equal 'value2', dc.get('key')
+ end
+ end
+
+ it 'supports delete with CAS' do
+ memcached_cas do |dc|
+ cas = dc.set('some_key', 'some_value')
+ dc.delete_cas('some_key', cas)
+ assert_nil dc.get('some_key')
+ end
+ end
+
+ it 'handles CAS round-trip operations' do
+ memcached_cas do |dc|
+ dc.flush
+
+ expected = {'blah' => 'blerg!'}
+ dc.set('some_key', expected)
+
+ value, cas = dc.get_cas('some_key')
+ assert_equal value, expected
+ assert(!cas.nil? && cas != 0)
+
+ # Set operation, first with wrong then with correct CAS
+ expected = {'blah' => 'set succeeded'}
+ assert(dc.set_cas('some_key', expected, cas+1) == false)
+ assert op_addset_succeeds(cas = dc.set_cas('some_key', expected, cas))
+
+ # Replace operation, first with wrong then with correct CAS
+ expected = {'blah' => 'replace succeeded'}
+ assert(dc.replace_cas('some_key', expected, cas+1) == false)
+ assert op_addset_succeeds(cas = dc.replace_cas('some_key', expected, cas))
+
+ # Delete operation, first with wrong then with correct CAS
+ assert(dc.delete_cas('some_key', cas+1) == false)
+ assert dc.delete_cas('some_key', cas)
+ end
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_compressor.rb b/lib/dalli-2.7.2/test/test_compressor.rb
new file mode 100644
index 000000000..8b024862b
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_compressor.rb
@@ -0,0 +1,53 @@
+# encoding: utf-8
+require 'helper'
+require 'json'
+require 'memcached_mock'
+
+class NoopCompressor
+ def self.compress(data)
+ data
+ end
+
+ def self.decompress(data)
+ data
+ end
+end
+
+describe 'Compressor' do
+
+ it 'default to Dalli::Compressor' do
+ memcached_kill(29199) do |dc|
+ memcache = Dalli::Client.new('127.0.0.1:29199')
+ memcache.set 1,2
+ assert_equal Dalli::Compressor, memcache.instance_variable_get('@ring').servers.first.compressor
+ end
+ end
+
+ it 'support a custom compressor' do
+ memcached_kill(29199) do |dc|
+ memcache = Dalli::Client.new('127.0.0.1:29199', :compressor => NoopCompressor)
+ memcache.set 1,2
+ begin
+ assert_equal NoopCompressor, memcache.instance_variable_get('@ring').servers.first.compressor
+
+ memcached(19127) do |newdc|
+ assert newdc.set("string-test", "a test string")
+ assert_equal("a test string", newdc.get("string-test"))
+ end
+ end
+ end
+ end
+end
+
+describe 'GzipCompressor' do
+
+ it 'compress and uncompress data using Zlib::GzipWriter/Reader' do
+ memcached(19127,nil,{:compress=>true,:compressor=>Dalli::GzipCompressor}) do |dc|
+ data = (0...1025).map{65.+(rand(26)).chr}.join
+ assert dc.set("test", data)
+ assert_equal Dalli::GzipCompressor, dc.instance_variable_get('@ring').servers.first.compressor
+ assert_equal(data, dc.get("test"))
+ end
+ end
+
+end
diff --git a/lib/dalli-2.7.2/test/test_dalli.rb b/lib/dalli-2.7.2/test/test_dalli.rb
new file mode 100644
index 000000000..fb6799184
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_dalli.rb
@@ -0,0 +1,625 @@
+require 'helper'
+require 'memcached_mock'
+
+describe 'Dalli' do
+ describe 'options parsing' do
+ it 'handle deprecated options' do
+ dc = Dalli::Client.new('foo', :compression => true)
+ assert dc.instance_variable_get(:@options)[:compress]
+ refute dc.instance_variable_get(:@options)[:compression]
+ end
+
+ it 'not warn about valid options' do
+ dc = Dalli::Client.new('foo', :compress => true)
+ # Rails.logger.expects :warn
+ assert dc.instance_variable_get(:@options)[:compress]
+ end
+
+ it 'raises error with invalid expires_in' do
+ bad_data = [{:bad => 'expires in data'}, Hash, [1,2,3]]
+ bad_data.each do |bad|
+ assert_raises ArgumentError do
+ Dalli::Client.new('foo', {:expires_in => bad})
+ end
+ end
+ end
+
+ it 'return string type for namespace attribute' do
+ dc = Dalli::Client.new('foo', :namespace => :wunderschoen)
+ assert_equal "wunderschoen", dc.send(:namespace)
+ dc.close
+
+ dc = Dalli::Client.new('foo', :namespace => Proc.new{:wunderschoen})
+ assert_equal "wunderschoen", dc.send(:namespace)
+ dc.close
+ end
+ end
+
+ describe 'key validation' do
+ it 'not allow blanks' do
+ memcached do |dc|
+ dc.set ' ', 1
+ assert_equal 1, dc.get(' ')
+ dc.set "\t", 1
+ assert_equal 1, dc.get("\t")
+ dc.set "\n", 1
+ assert_equal 1, dc.get("\n")
+ assert_raises ArgumentError do
+ dc.set "", 1
+ end
+ assert_raises ArgumentError do
+ dc.set nil, 1
+ end
+ end
+ end
+
+ it 'allow namespace to be a symbol' do
+ memcached(19122, '', :namespace => :wunderschoen) do |dc|
+ dc.set "x" * 251, 1
+ assert 1, dc.get("#{'x' * 200}:md5:#{Digest::MD5.hexdigest('x' * 251)}")
+ end
+ end
+ end
+
+ it "default to localhost:11211" do
+ dc = Dalli::Client.new
+ ring = dc.send(:ring)
+ s1 = ring.servers.first.hostname
+ assert_equal 1, ring.servers.size
+ dc.close
+
+ dc = Dalli::Client.new('localhost:11211')
+ ring = dc.send(:ring)
+ s2 = ring.servers.first.hostname
+ assert_equal 1, ring.servers.size
+ dc.close
+
+ dc = Dalli::Client.new(['localhost:11211'])
+ ring = dc.send(:ring)
+ s3 = ring.servers.first.hostname
+ assert_equal 1, ring.servers.size
+ dc.close
+
+ assert_equal '127.0.0.1', s1
+ assert_equal s2, s3
+ end
+
+ it "accept comma separated string" do
+ dc = Dalli::Client.new("server1.example.com:11211,server2.example.com:11211")
+ ring = dc.send(:ring)
+ assert_equal 2, ring.servers.size
+ s1,s2 = ring.servers.map(&:hostname)
+ assert_equal "server1.example.com", s1
+ assert_equal "server2.example.com", s2
+ end
+
+ it "accept array of servers" do
+ dc = Dalli::Client.new(["server1.example.com:11211","server2.example.com:11211"])
+ ring = dc.send(:ring)
+ assert_equal 2, ring.servers.size
+ s1,s2 = ring.servers.map(&:hostname)
+ assert_equal "server1.example.com", s1
+ assert_equal "server2.example.com", s2
+ end
+
+ describe 'using a live server' do
+
+ it "support get/set" do
+ memcached do |dc|
+ dc.flush
+
+ val1 = "1234567890"*105000
+ assert_equal false, dc.set('a', val1)
+
+ val1 = "1234567890"*100000
+ dc.set('a', val1)
+ val2 = dc.get('a')
+ assert_equal val1, val2
+
+ assert op_addset_succeeds(dc.set('a', nil))
+ assert_nil dc.get('a')
+ end
+ end
+
+ it 'supports delete' do
+ memcached do |dc|
+ dc.set('some_key', 'some_value')
+ assert_equal 'some_value', dc.get('some_key')
+
+ dc.delete('some_key')
+ assert_nil dc.get('some_key')
+ end
+ end
+
+ it 'returns nil for nonexistent key' do
+ memcached do |dc|
+ assert_equal nil, dc.get('notexist')
+ end
+ end
+
+ it 'allows "Not found" as value' do
+ memcached do |dc|
+ dc.set('key1', 'Not found')
+ assert_equal 'Not found', dc.get('key1')
+ end
+ end
+
+ it "support stats" do
+ memcached do |dc|
+ # make sure get_hits is not 0
+ dc.get(:a)
+
+ stats = dc.stats
+ servers = stats.keys
+ assert(servers.any? do |s|
+ stats[s]["get_hits"].to_i != 0
+ end, "general stats failed")
+
+ stats_items = dc.stats(:items)
+ servers = stats_items.keys
+ assert(servers.all? do |s|
+ stats_items[s].keys.any? do |key|
+ key =~ /items:[0-9]+:number/
+ end
+ end, "stats items failed")
+
+ stats_slabs = dc.stats(:slabs)
+ servers = stats_slabs.keys
+ assert(servers.all? do |s|
+ stats_slabs[s].keys.any? do |key|
+ key == "active_slabs"
+ end
+ end, "stats slabs failed")
+
+ # reset_stats test
+ results = dc.reset_stats
+ assert(results.all? { |x| x })
+ stats = dc.stats
+ servers = stats.keys
+
+ # check if reset was performed
+ servers.each do |s|
+ assert_equal 0, dc.stats[s]["get_hits"].to_i
+ end
+ end
+ end
+
+ it "support the fetch operation" do
+ memcached do |dc|
+ dc.flush
+
+ expected = { 'blah' => 'blerg!' }
+ executed = false
+ value = dc.fetch('fetch_key') do
+ executed = true
+ expected
+ end
+ assert_equal expected, value
+ assert_equal true, executed
+
+ executed = false
+ value = dc.fetch('fetch_key') do
+ executed = true
+ expected
+ end
+ assert_equal expected, value
+ assert_equal false, executed
+ end
+ end
+
+ it "support the fetch operation with falsey values" do
+ memcached do |dc|
+ dc.flush
+
+ dc.set("fetch_key", false)
+ res = dc.fetch("fetch_key") { flunk "fetch block called" }
+ assert_equal false, res
+
+ dc.set("fetch_key", nil)
+ res = dc.fetch("fetch_key") { "bob" }
+ assert_equal 'bob', res
+ end
+ end
+
+ it "support the cas operation" do
+ memcached do |dc|
+ dc.flush
+
+ expected = { 'blah' => 'blerg!' }
+
+ resp = dc.cas('cas_key') do |value|
+ fail('Value does not exist')
+ end
+ assert_nil resp
+
+ mutated = { 'blah' => 'foo!' }
+ dc.set('cas_key', expected)
+ resp = dc.cas('cas_key') do |value|
+ assert_equal expected, value
+ mutated
+ end
+ assert op_cas_succeeds(resp)
+
+ resp = dc.get('cas_key')
+ assert_equal mutated, resp
+ end
+ end
+
+ it "support multi-get" do
+ memcached do |dc|
+ dc.close
+ dc.flush
+ resp = dc.get_multi(%w(a b c d e f))
+ assert_equal({}, resp)
+
+ dc.set('a', 'foo')
+ dc.set('b', 123)
+ dc.set('c', %w(a b c))
+ # Invocation without block
+ resp = dc.get_multi(%w(a b c d e f))
+ expected_resp = { 'a' => 'foo', 'b' => 123, 'c' => %w(a b c) }
+ assert_equal(expected_resp, resp)
+
+ # Invocation with block
+ dc.get_multi(%w(a b c d e f)) do |k, v|
+ assert(expected_resp.has_key?(k) && expected_resp[k] == v)
+ expected_resp.delete(k)
+ end
+ assert expected_resp.empty?
+
+ # Perform a big multi-get with 1000 elements.
+ arr = []
+ dc.multi do
+ 1000.times do |idx|
+ dc.set idx, idx
+ arr << idx
+ end
+ end
+
+ result = dc.get_multi(arr)
+ assert_equal(1000, result.size)
+ assert_equal(50, result['50'])
+ end
+ end
+
+ it 'support raw incr/decr' do
+ memcached do |client|
+ client.flush
+
+ assert op_addset_succeeds(client.set('fakecounter', 0, 0, :raw => true))
+ assert_equal 1, client.incr('fakecounter', 1)
+ assert_equal 2, client.incr('fakecounter', 1)
+ assert_equal 3, client.incr('fakecounter', 1)
+ assert_equal 1, client.decr('fakecounter', 2)
+ assert_equal "1", client.get('fakecounter', :raw => true)
+
+ resp = client.incr('mycounter', 0)
+ assert_nil resp
+
+ resp = client.incr('mycounter', 1, 0, 2)
+ assert_equal 2, resp
+ resp = client.incr('mycounter', 1)
+ assert_equal 3, resp
+
+ resp = client.set('rawcounter', 10, 0, :raw => true)
+ assert op_cas_succeeds(resp)
+
+ resp = client.get('rawcounter', :raw => true)
+ assert_equal '10', resp
+
+ resp = client.incr('rawcounter', 1)
+ assert_equal 11, resp
+ end
+ end
+
+ it "support incr/decr operations" do
+ memcached do |dc|
+ dc.flush
+
+ resp = dc.decr('counter', 100, 5, 0)
+ assert_equal 0, resp
+
+ resp = dc.decr('counter', 10)
+ assert_equal 0, resp
+
+ resp = dc.incr('counter', 10)
+ assert_equal 10, resp
+
+ current = 10
+ 100.times do |x|
+ resp = dc.incr('counter', 10)
+ assert_equal current + ((x+1)*10), resp
+ end
+
+ resp = dc.decr('10billion', 0, 5, 10)
+ # go over the 32-bit mark to verify proper (un)packing
+ resp = dc.incr('10billion', 10_000_000_000)
+ assert_equal 10_000_000_010, resp
+
+ resp = dc.decr('10billion', 1)
+ assert_equal 10_000_000_009, resp
+
+ resp = dc.decr('10billion', 0)
+ assert_equal 10_000_000_009, resp
+
+ resp = dc.incr('10billion', 0)
+ assert_equal 10_000_000_009, resp
+
+ assert_nil dc.incr('DNE', 10)
+ assert_nil dc.decr('DNE', 10)
+
+ resp = dc.incr('big', 100, 5, 0xFFFFFFFFFFFFFFFE)
+ assert_equal 0xFFFFFFFFFFFFFFFE, resp
+ resp = dc.incr('big', 1)
+ assert_equal 0xFFFFFFFFFFFFFFFF, resp
+
+ # rollover the 64-bit value, we'll get something undefined.
+ resp = dc.incr('big', 1)
+ refute_equal 0x10000000000000000, resp
+ dc.reset
+ end
+ end
+
+ it 'support the append and prepend operations' do
+ memcached do |dc|
+ dc.flush
+ assert op_addset_succeeds(dc.set('456', 'xyz', 0, :raw => true))
+ assert_equal true, dc.prepend('456', '0')
+ assert_equal true, dc.append('456', '9')
+ assert_equal '0xyz9', dc.get('456', :raw => true)
+ assert_equal '0xyz9', dc.get('456')
+
+ assert_equal false, dc.append('nonexist', 'abc')
+ assert_equal false, dc.prepend('nonexist', 'abc')
+ end
+ end
+
+ it 'supports replace operation' do
+ memcached do |dc|
+ dc.flush
+ dc.set('key', 'value')
+ assert op_replace_succeeds(dc.replace('key', 'value2'))
+
+ assert_equal 'value2', dc.get('key')
+ end
+ end
+
+ it 'support touch operation' do
+ memcached do |dc|
+ begin
+ dc.flush
+ dc.set 'key', 'value'
+ assert_equal true, dc.touch('key', 10)
+ assert_equal true, dc.touch('key')
+ assert_equal 'value', dc.get('key')
+ assert_nil dc.touch('notexist')
+ rescue Dalli::DalliError => e
+ # This happens when the memcached version is older than 1.4.8
+ assert_equal 'Response error 129: Unknown command', e.message
+ end
+ end
+ end
+
+ it 'support version operation' do
+ memcached do |dc|
+ v = dc.version
+ servers = v.keys
+ assert(servers.any? do |s|
+ v[s] != nil
+ end, "version failed")
+ end
+ end
+
+ it 'allow TCP connections to be configured for keepalive' do
+ memcached(19122, '', :keepalive => true) do |dc|
+ dc.set(:a, 1)
+ ring = dc.send(:ring)
+ server = ring.servers.first
+ socket = server.instance_variable_get('@sock')
+
+ optval = socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE)
+ optval = optval.unpack 'i'
+
+ assert_equal true, (optval[0] != 0)
+ end
+ end
+
+ it "pass a simple smoke test" do
+ memcached do |dc|
+ resp = dc.flush
+ refute_nil resp
+ assert_equal [true, true], resp
+
+ assert op_addset_succeeds(dc.set(:foo, 'bar'))
+ assert_equal 'bar', dc.get(:foo)
+
+ resp = dc.get('123')
+ assert_equal nil, resp
+
+ assert op_addset_succeeds(dc.set('123', 'xyz'))
+
+ resp = dc.get('123')
+ assert_equal 'xyz', resp
+
+ assert op_addset_succeeds(dc.set('123', 'abc'))
+
+ dc.prepend('123', '0')
+ dc.append('123', '0')
+
+ assert_raises Dalli::UnmarshalError do
+ resp = dc.get('123')
+ end
+
+ dc.close
+ dc = nil
+
+ dc = Dalli::Client.new('localhost:19122')
+
+ assert op_addset_succeeds(dc.set('456', 'xyz', 0, :raw => true))
+
+ resp = dc.prepend '456', '0'
+ assert_equal true, resp
+
+ resp = dc.append '456', '9'
+ assert_equal true, resp
+
+ resp = dc.get('456', :raw => true)
+ assert_equal '0xyz9', resp
+
+ assert op_addset_succeeds(dc.set('456', false))
+
+ resp = dc.get('456')
+ assert_equal false, resp
+
+ resp = dc.stats
+ assert_equal Hash, resp.class
+
+ dc.close
+ end
+ end
+
+ it "support multithreaded access" do
+ memcached do |cache|
+ cache.flush
+ workers = []
+
+ cache.set('f', 'zzz')
+ assert op_cas_succeeds((cache.cas('f') do |value|
+ value << 'z'
+ end))
+ assert_equal 'zzzz', cache.get('f')
+
+ # Have a bunch of threads perform a bunch of operations at the same time.
+ # Verify the result of each operation to ensure the request and response
+ # are not intermingled between threads.
+ 10.times do
+ workers << Thread.new do
+ 100.times do
+ cache.set('a', 9)
+ cache.set('b', 11)
+ inc = cache.incr('cat', 10, 0, 10)
+ cache.set('f', 'zzz')
+ res = cache.cas('f') do |value|
+ value << 'z'
+ end
+ refute_nil res
+ assert_equal false, cache.add('a', 11)
+ assert_equal({ 'a' => 9, 'b' => 11 }, cache.get_multi(['a', 'b']))
+ inc = cache.incr('cat', 10)
+ assert_equal 0, inc % 5
+ cache.decr('cat', 5)
+ assert_equal 11, cache.get('b')
+
+ assert_equal %w(a b), cache.get_multi('a', 'b', 'c').keys.sort
+
+ end
+ end
+ end
+
+ workers.each { |w| w.join }
+ cache.flush
+ end
+ end
+
+ it "handle namespaced keys" do
+ memcached do |dc|
+ dc = Dalli::Client.new('localhost:19122', :namespace => 'a')
+ dc.set('namespaced', 1)
+ dc2 = Dalli::Client.new('localhost:19122', :namespace => 'b')
+ dc2.set('namespaced', 2)
+ assert_equal 1, dc.get('namespaced')
+ assert_equal 2, dc2.get('namespaced')
+ end
+ end
+
+ it "handle nil namespace" do
+ memcached do |dc|
+ dc = Dalli::Client.new('localhost:19122', :namespace => nil)
+ assert_equal 'key', dc.send(:validate_key, 'key')
+ end
+ end
+
+ it 'truncate cache keys that are too long' do
+ memcached do
+ dc = Dalli::Client.new('localhost:19122', :namespace => 'some:namespace')
+ key = "this cache key is far too long so it must be hashed and truncated and stuff" * 10
+ value = "some value"
+ assert op_addset_succeeds(dc.set(key, value))
+ assert_equal value, dc.get(key)
+ end
+ end
+
+ it "handle namespaced keys in multi_get" do
+ memcached do |dc|
+ dc = Dalli::Client.new('localhost:19122', :namespace => 'a')
+ dc.set('a', 1)
+ dc.set('b', 2)
+ assert_equal({'a' => 1, 'b' => 2}, dc.get_multi('a', 'b'))
+ end
+ end
+
+ it "handle application marshalling issues" do
+ memcached do |dc|
+ old = Dalli.logger
+ Dalli.logger = Logger.new(nil)
+ begin
+ assert_equal false, dc.set('a', Proc.new { true })
+ ensure
+ Dalli.logger = old
+ end
+ end
+ end
+
+ describe 'with compression' do
+ it 'allow large values' do
+ memcached do |dc|
+ dalli = Dalli::Client.new(dc.instance_variable_get(:@servers), :compress => true)
+
+ value = "0"*1024*1024
+ assert_equal false, dc.set('verylarge', value)
+ dalli.set('verylarge', value)
+ end
+ end
+ end
+
+ describe 'in low memory conditions' do
+
+ it 'handle error response correctly' do
+ memcached(19125, '-m 1 -M') do |dc|
+ failed = false
+ value = "1234567890"*100
+ 1_000.times do |idx|
+ begin
+ assert op_addset_succeeds(dc.set(idx, value))
+ rescue Dalli::DalliError
+ failed = true
+ assert((800..960).include?(idx), "unexpected failure on iteration #{idx}")
+ break
+ end
+ end
+ assert failed, 'did not fail under low memory conditions'
+ end
+ end
+
+ it 'fit more values with compression' do
+ memcached(19126, '-m 1 -M') do |dc|
+ dalli = Dalli::Client.new('localhost:19126', :compress => true)
+ failed = false
+ value = "1234567890"*1000
+ 10_000.times do |idx|
+ begin
+ assert op_addset_succeeds(dalli.set(idx, value))
+ rescue Dalli::DalliError
+ failed = true
+ assert((6000..7800).include?(idx), "unexpected failure on iteration #{idx}")
+ break
+ end
+ end
+ assert failed, 'did not fail under low memory conditions'
+ end
+ end
+
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_encoding.rb b/lib/dalli-2.7.2/test/test_encoding.rb
new file mode 100644
index 000000000..2ab3ece64
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_encoding.rb
@@ -0,0 +1,32 @@
+# encoding: utf-8
+require 'helper'
+require 'memcached_mock'
+
+describe 'Encoding' do
+
+ describe 'using a live server' do
+ it 'support i18n content' do
+ memcached do |dc|
+ key = 'foo'
+ utf_key = utf8 = 'ƒ©åÍÎ'
+
+ assert dc.set(key, utf8)
+ assert_equal utf8, dc.get(key)
+
+ dc.set(utf_key, utf8)
+ assert_equal utf8, dc.get(utf_key)
+ end
+ end
+
+ it 'support content expiry' do
+ memcached do |dc|
+ key = 'foo'
+ assert dc.set(key, 'bar', 1)
+ assert_equal 'bar', dc.get(key)
+ sleep 1.2
+ assert_equal nil, dc.get(key)
+ end
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_failover.rb b/lib/dalli-2.7.2/test/test_failover.rb
new file mode 100644
index 000000000..990b6bc31
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_failover.rb
@@ -0,0 +1,128 @@
+require 'helper'
+
+describe 'failover' do
+
+ describe 'timeouts' do
+ it 'not lead to corrupt sockets' do
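+ # Interrupt a burst of writes with Timeout, then verify the socket still yields a clean, correct response.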
+ memcached(29125) do
+ dc = Dalli::Client.new ['localhost:29125']
+ begin
+ Timeout.timeout 0.01 do
+ 1_000.times do
+ dc.set("test_123", {:test => "123"})
+ end
+ flunk("Did not timeout")
+ end
+ rescue Timeout::Error
+ end
+
+ assert_equal({:test => '123'}, dc.get("test_123"))
+ end
+ end
+ end
+
+
+ describe 'assuming some bad servers' do
+
+ it 'silently reconnect if server hiccups' do
+ memcached(29125) do
+ dc = Dalli::Client.new ['localhost:29125']
+ dc.set 'foo', 'bar'
+ foo = dc.get 'foo'
+ assert_equal foo, 'bar'
+
+ memcached_kill(29125)
+ memcached(29125) do
+
+ foo = dc.get 'foo'
+ assert_nil foo
+
+ memcached_kill(29125)
+ end
+ end
+ end
+
+ it 'handle graceful failover' do
+ memcached(29125) do
+ memcached(29126) do
+ dc = Dalli::Client.new ['localhost:29125', 'localhost:29126']
+ dc.set 'foo', 'bar'
+ foo = dc.get 'foo'
+ assert_equal foo, 'bar'
+
+ memcached_kill(29125)
+
+ dc.set 'foo', 'bar'
+ foo = dc.get 'foo'
+ assert_equal foo, 'bar'
+
+ memcached_kill(29126)
+
+ assert_raises Dalli::RingError, :message => "No server available" do
+ dc.set 'foo', 'bar'
+ end
+ end
+ end
+ end
+
+ it 'handle them gracefully in get_multi' do
+ memcached(29125) do
+ memcached(29126) do
+ dc = Dalli::Client.new ['localhost:29125', 'localhost:29126']
+ dc.set 'a', 'a1'
+ result = dc.get_multi ['a']
+ assert_equal result, {'a' => 'a1'}
+
+ memcached_kill(29125)
+
+ result = dc.get_multi ['a']
+ assert_equal result, {'a' => 'a1'}
+ end
+ end
+ end
+
+ it 'handle graceful failover in get_multi' do
+ memcached(29125) do
+ memcached(29126) do
+ dc = Dalli::Client.new ['localhost:29125', 'localhost:29126']
+ dc.set 'foo', 'foo1'
+ dc.set 'bar', 'bar1'
+ result = dc.get_multi ['foo', 'bar']
+ assert_equal result, {'foo' => 'foo1', 'bar' => 'bar1'}
+
+ memcached_kill(29125)
+
+ dc.set 'foo', 'foo1'
+ dc.set 'bar', 'bar1'
+ result = dc.get_multi ['foo', 'bar']
+ assert_equal result, {'foo' => 'foo1', 'bar' => 'bar1'}
+
+ memcached_kill(29126)
+
+ result = dc.get_multi ['foo', 'bar']
+ assert_equal result, {}
+ end
+ end
+ end
+
+ it 'still report stats properly' do
+ memcached(29125) do
+ memcached(29126) do
+ dc = Dalli::Client.new ['localhost:29125', 'localhost:29126']
+ result = dc.stats
+ assert_instance_of Hash, result['localhost:29125']
+ assert_instance_of Hash, result['localhost:29126']
+
+ memcached_kill(29125)
+
+ dc = Dalli::Client.new ['localhost:29125', 'localhost:29126']
+ result = dc.stats
+ assert_instance_of NilClass, result['localhost:29125']
+ assert_instance_of Hash, result['localhost:29126']
+
+ memcached_kill(29126)
+ end
+ end
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_network.rb b/lib/dalli-2.7.2/test/test_network.rb
new file mode 100644
index 000000000..7399c1737
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_network.rb
@@ -0,0 +1,54 @@
+require 'helper'
+
+describe 'Network' do
+
+ describe 'assuming a bad network' do
+
+ it 'handle no server available' do
+ assert_raises Dalli::RingError, :message => "No server available" do
+ dc = Dalli::Client.new 'localhost:19333'
+ dc.get 'foo'
+ end
+ end
+
+ describe 'with a fake server' do
+ it 'handle connection reset' do
+ memcached_mock(lambda {|sock| sock.close }) do
+ assert_raises Dalli::RingError, :message => "No server available" do
+ dc = Dalli::Client.new('localhost:19123')
+ dc.get('abc')
+ end
+ end
+ end
+
+ it 'handle malformed response' do
+ memcached_mock(lambda {|sock| sock.write('123') }) do
+ assert_raises Dalli::RingError, :message => "No server available" do
+ dc = Dalli::Client.new('localhost:19123')
+ dc.get('abc')
+ end
+ end
+ end
+
+ it 'handle connect timeouts' do
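+ # The mock delays accepting the connection longer than the client's socket timeout, so the server is marked down.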
+ memcached_mock(lambda {|sock| sleep(0.6); sock.close }, :delayed_start) do
+ assert_raises Dalli::RingError, :message => "No server available" do
+ dc = Dalli::Client.new('localhost:19123')
+ dc.get('abc')
+ end
+ end
+ end
+
+ it 'handle read timeouts' do
+ memcached_mock(lambda {|sock| sleep(0.6); sock.write('giraffe') }) do
+ assert_raises Dalli::RingError, :message => "No server available" do
+ dc = Dalli::Client.new('localhost:19123')
+ dc.get('abc')
+ end
+ end
+ end
+
+ end
+
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_rack_session.rb b/lib/dalli-2.7.2/test/test_rack_session.rb
new file mode 100644
index 000000000..657b061a7
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_rack_session.rb
@@ -0,0 +1,341 @@
+require 'helper'
+
+require 'rack/session/dalli'
+require 'rack/lint'
+require 'rack/mock'
+require 'thread'
+
+describe Rack::Session::Dalli do
+ Rack::Session::Dalli::DEFAULT_OPTIONS[:memcache_server] = 'localhost:19129'
+
+ before do
+ memcached(19129) do
+ end
+
+ # test memcache connection
+ Rack::Session::Dalli.new(incrementor)
+ end
+
+ let(:session_key) { Rack::Session::Dalli::DEFAULT_OPTIONS[:key] }
+ let(:session_match) do
+ /#{session_key}=([0-9a-fA-F]+);/
+ end
+ let(:incrementor_proc) do
+ lambda do |env|
+ env["rack.session"]["counter"] ||= 0
+ env["rack.session"]["counter"] += 1
+ Rack::Response.new(env["rack.session"].inspect).to_a
+ end
+ end
+ let(:drop_session) do
+ Rack::Lint.new(proc do |env|
+ env['rack.session.options'][:drop] = true
+ incrementor_proc.call(env)
+ end)
+ end
+ let(:renew_session) do
+ Rack::Lint.new(proc do |env|
+ env['rack.session.options'][:renew] = true
+ incrementor_proc.call(env)
+ end)
+ end
+ let(:defer_session) do
+ Rack::Lint.new(proc do |env|
+ env['rack.session.options'][:defer] = true
+ incrementor_proc.call(env)
+ end)
+ end
+ let(:skip_session) do
+ Rack::Lint.new(proc do |env|
+ env['rack.session.options'][:skip] = true
+ incrementor_proc.call(env)
+ end)
+ end
+ let(:incrementor) { Rack::Lint.new(incrementor_proc) }
+
+ it "faults on no connection" do
+ assert_raises Dalli::RingError do
+ Rack::Session::Dalli.new(incrementor, :memcache_server => 'nosuchserver')
+ end
+ end
+
+ it "connects to existing server" do
+ assert_silent do
+ rsd = Rack::Session::Dalli.new(incrementor, :namespace => 'test:rack:session')
+ rsd.pool.set('ping', '')
+ end
+ end
+
+ it "passes options to MemCache" do
+ rsd = Rack::Session::Dalli.new(incrementor, :namespace => 'test:rack:session')
+ assert_equal('test:rack:session', rsd.pool.instance_eval { @options[:namespace] })
+ end
+
+ it "creates a new cookie" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ res = Rack::MockRequest.new(rsd).get("/")
+ assert res["Set-Cookie"].include?("#{session_key}=")
+ assert_equal '{"counter"=>1}', res.body
+ end
+
+ it "determines session from a cookie" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ req = Rack::MockRequest.new(rsd)
+ res = req.get("/")
+ cookie = res["Set-Cookie"]
+ assert_equal '{"counter"=>2}', req.get("/", "HTTP_COOKIE" => cookie).body
+ assert_equal '{"counter"=>3}', req.get("/", "HTTP_COOKIE" => cookie).body
+ end
+
+ it "determines session only from a cookie by default" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ req = Rack::MockRequest.new(rsd)
+ res = req.get("/")
+ sid = res["Set-Cookie"][session_match, 1]
+ assert_equal '{"counter"=>1}', req.get("/?rack.session=#{sid}").body
+ assert_equal '{"counter"=>1}', req.get("/?rack.session=#{sid}").body
+ end
+
+ it "determines session from params" do
+ rsd = Rack::Session::Dalli.new(incrementor, :cookie_only => false)
+ req = Rack::MockRequest.new(rsd)
+ res = req.get("/")
+ sid = res["Set-Cookie"][session_match, 1]
+ assert_equal '{"counter"=>2}', req.get("/?rack.session=#{sid}").body
+ assert_equal '{"counter"=>3}', req.get("/?rack.session=#{sid}").body
+ end
+
+ it "survives nonexistant cookies" do
+ bad_cookie = "rack.session=blarghfasel"
+ rsd = Rack::Session::Dalli.new(incrementor)
+ res = Rack::MockRequest.new(rsd).
+ get("/", "HTTP_COOKIE" => bad_cookie)
+ assert_equal '{"counter"=>1}', res.body
+ cookie = res["Set-Cookie"][session_match]
+ refute_match(/#{bad_cookie}/, cookie)
+ end
+
+ it "survives nonexistant blank cookies" do
+ bad_cookie = "rack.session="
+ rsd = Rack::Session::Dalli.new(incrementor)
+ res = Rack::MockRequest.new(rsd).
+ get("/", "HTTP_COOKIE" => bad_cookie)
+ cookie = res["Set-Cookie"][session_match]
+ refute_match(/#{bad_cookie}$/, cookie)
+ end
+
+ it "maintains freshness" do
+ rsd = Rack::Session::Dalli.new(incrementor, :expire_after => 3)
+ res = Rack::MockRequest.new(rsd).get('/')
+ assert res.body.include?('"counter"=>1')
+ cookie = res["Set-Cookie"]
+ res = Rack::MockRequest.new(rsd).get('/', "HTTP_COOKIE" => cookie)
+ assert_equal cookie, res["Set-Cookie"]
+ assert res.body.include?('"counter"=>2')
+ puts 'Sleeping to expire session' if $DEBUG
+ sleep 4
+ res = Rack::MockRequest.new(rsd).get('/', "HTTP_COOKIE" => cookie)
+ refute_equal cookie, res["Set-Cookie"]
+ assert res.body.include?('"counter"=>1')
+ end
+
+ it "does not send the same session id if it did not change" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ req = Rack::MockRequest.new(rsd)
+
+ res0 = req.get("/")
+ cookie = res0["Set-Cookie"][session_match]
+ assert_equal '{"counter"=>1}', res0.body
+
+ res1 = req.get("/", "HTTP_COOKIE" => cookie)
+ assert_nil res1["Set-Cookie"]
+ assert_equal '{"counter"=>2}', res1.body
+
+ res2 = req.get("/", "HTTP_COOKIE" => cookie)
+ assert_nil res2["Set-Cookie"]
+ assert_equal '{"counter"=>3}', res2.body
+ end
+
+ it "deletes cookies with :drop option" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ req = Rack::MockRequest.new(rsd)
+ drop = Rack::Utils::Context.new(rsd, drop_session)
+ dreq = Rack::MockRequest.new(drop)
+
+ res1 = req.get("/")
+ session = (cookie = res1["Set-Cookie"])[session_match]
+ assert_equal '{"counter"=>1}', res1.body
+
+ res2 = dreq.get("/", "HTTP_COOKIE" => cookie)
+ assert_nil res2["Set-Cookie"]
+ assert_equal '{"counter"=>2}', res2.body
+
+ res3 = req.get("/", "HTTP_COOKIE" => cookie)
+ refute_equal session, res3["Set-Cookie"][session_match]
+ assert_equal '{"counter"=>1}', res3.body
+ end
+
+ it "provides new session id with :renew option" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ req = Rack::MockRequest.new(rsd)
+ renew = Rack::Utils::Context.new(rsd, renew_session)
+ rreq = Rack::MockRequest.new(renew)
+
+ res1 = req.get("/")
+ session = (cookie = res1["Set-Cookie"])[session_match]
+ assert_equal '{"counter"=>1}', res1.body
+
+ res2 = rreq.get("/", "HTTP_COOKIE" => cookie)
+ new_cookie = res2["Set-Cookie"]
+ new_session = new_cookie[session_match]
+ refute_equal session, new_session
+ assert_equal '{"counter"=>2}', res2.body
+
+ res3 = req.get("/", "HTTP_COOKIE" => new_cookie)
+ assert_equal '{"counter"=>3}', res3.body
+
+ # Old cookie was deleted
+ res4 = req.get("/", "HTTP_COOKIE" => cookie)
+ assert_equal '{"counter"=>1}', res4.body
+ end
+
+ it "omits cookie with :defer option but still updates the state" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ count = Rack::Utils::Context.new(rsd, incrementor)
+ defer = Rack::Utils::Context.new(rsd, defer_session)
+ dreq = Rack::MockRequest.new(defer)
+ creq = Rack::MockRequest.new(count)
+
+ res0 = dreq.get("/")
+ assert_nil res0["Set-Cookie"]
+ assert_equal '{"counter"=>1}', res0.body
+
+ res0 = creq.get("/")
+ res1 = dreq.get("/", "HTTP_COOKIE" => res0["Set-Cookie"])
+ assert_equal '{"counter"=>2}', res1.body
+ res2 = dreq.get("/", "HTTP_COOKIE" => res0["Set-Cookie"])
+ assert_equal '{"counter"=>3}', res2.body
+ end
+
+ it "omits cookie and state update with :skip option" do
+ rsd = Rack::Session::Dalli.new(incrementor)
+ count = Rack::Utils::Context.new(rsd, incrementor)
+ skip = Rack::Utils::Context.new(rsd, skip_session)
+ sreq = Rack::MockRequest.new(skip)
+ creq = Rack::MockRequest.new(count)
+
+ res0 = sreq.get("/")
+ assert_nil res0["Set-Cookie"]
+ assert_equal '{"counter"=>1}', res0.body
+
+ res0 = creq.get("/")
+ res1 = sreq.get("/", "HTTP_COOKIE" => res0["Set-Cookie"])
+ assert_equal '{"counter"=>2}', res1.body
+ res2 = sreq.get("/", "HTTP_COOKIE" => res0["Set-Cookie"])
+ assert_equal '{"counter"=>2}', res2.body
+ end
+
+ it "updates deep hashes correctly" do
+ hash_check = proc do |env|
+ session = env['rack.session']
+ unless session.include? 'test'
+ session.update :a => :b, :c => { :d => :e },
+ :f => { :g => { :h => :i} }, 'test' => true
+ else
+ session[:f][:g][:h] = :j
+ end
+ [200, {}, [session.inspect]]
+ end
+ rsd = Rack::Session::Dalli.new(hash_check)
+ req = Rack::MockRequest.new(rsd)
+
+ res0 = req.get("/")
+ session_id = (cookie = res0["Set-Cookie"])[session_match, 1]
+ ses0 = rsd.pool.get(session_id, true)
+
+ req.get("/", "HTTP_COOKIE" => cookie)
+ ses1 = rsd.pool.get(session_id, true)
+
+ refute_equal ses0, ses1
+ end
+
+ # anyone know how to do this better?
+ it "cleanly merges sessions when multithreaded" do
+ unless $DEBUG
+ assert_equal 1, 1 # fake assertion to appease the mighty bacon
+ next
+ end
+ warn 'Running multithread test for Session::Dalli'
+ rsd = Rack::Session::Dalli.new(incrementor)
+ req = Rack::MockRequest.new(rsd)
+
+ res = req.get('/')
+ assert_equal '{"counter"=>1}', res.body
+ cookie = res["Set-Cookie"]
+ session_id = cookie[session_match, 1]
+
+ delta_incrementor = lambda do |env|
+ # emulate threads operating on separate copies of the session
+ env['rack.session'] = env['rack.session'].dup
+ Thread.stop
+ env['rack.session'][(Time.now.usec*rand).to_i] = true
+ incrementor.call(env)
+ end
+ tses = Rack::Utils::Context.new rsd, delta_incrementor
+ treq = Rack::MockRequest.new(tses)
+ tnum = rand(7).to_i+5
+ r = Array.new(tnum) do
+ Thread.new(treq) do |run|
+ run.get('/', "HTTP_COOKIE" => cookie, 'rack.multithread' => true)
+ end
+ end.reverse.map{|t| t.run.join.value }
+ r.each do |request|
+ assert_equal cookie, request['Set-Cookie']
+ assert request.body.include?('"counter"=>2')
+ end
+
+ session = rsd.pool.get(session_id)
+ assert_equal tnum+1, session.size # counter
+ assert_equal 2, session['counter'] # meeeh
+
+ tnum = rand(7).to_i+5
+ r = Array.new(tnum) do |i|
+ app = Rack::Utils::Context.new rsd, time_delta
+ req = Rack::MockRequest.new app
+ Thread.new(req) do |run|
+ run.get('/', "HTTP_COOKIE" => cookie, 'rack.multithread' => true)
+ end
+ end.reverse.map{|t| t.run.join.value }
+ r.each do |request|
+ assert_equal cookie, request['Set-Cookie']
+ assert request.body.include?('"counter"=>3')
+ end
+
+ session = rsd.pool.get(session_id)
+ assert_equal tnum+1, session.size
+ assert_equal 3, session['counter']
+
+ drop_counter = proc do |env|
+ env['rack.session'].delete 'counter'
+ env['rack.session']['foo'] = 'bar'
+ [200, {'Content-Type'=>'text/plain'}, env['rack.session'].inspect]
+ end
+ tses = Rack::Utils::Context.new rsd, drop_counter
+ treq = Rack::MockRequest.new(tses)
+ tnum = rand(7).to_i+5
+ r = Array.new(tnum) do
+ Thread.new(treq) do |run|
+ run.get('/', "HTTP_COOKIE" => cookie, 'rack.multithread' => true)
+ end
+ end.reverse.map{|t| t.run.join.value }
+ r.each do |request|
+ assert_equal cookie, request['Set-Cookie']
+ assert request.body.include?('"foo"=>"bar"')
+ end
+
+ session = rsd.pool.get(session_id)
+ assert_equal r.size+1, session.size
+ assert_nil session['counter']
+ assert_equal 'bar', session['foo']
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_ring.rb b/lib/dalli-2.7.2/test/test_ring.rb
new file mode 100644
index 000000000..de4862a9c
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_ring.rb
@@ -0,0 +1,85 @@
+require 'helper'
+
+describe 'Ring' do
+
+ describe 'a ring of servers' do
+
+ it "have the continuum sorted by value" do
+ servers = [stub(:hostname => "localhost", :port => "11211", :weight => 1),
+ stub(:hostname => "localhost", :port => "9500", :weight => 1)]
+ ring = Dalli::Ring.new(servers, {})
+ previous_value = 0
+ ring.continuum.each do |entry|
+ assert entry.value > previous_value
+ previous_value = entry.value
+ end
+ end
+
+ it 'raise when no servers are available/defined' do
+ ring = Dalli::Ring.new([], {})
+ assert_raises Dalli::RingError, :message => "No server available" do
+ ring.server_for_key('test')
+ end
+ end
+
+ describe 'containing only a single server' do
+ it "raise correctly when it's not alive" do
+ servers = [
+ Dalli::Server.new("localhost:12345"),
+ ]
+ ring = Dalli::Ring.new(servers, {})
+ assert_raises Dalli::RingError, :message => "No server available" do
+ ring.server_for_key('test')
+ end
+ end
+
+ it "return the server when it's alive" do
+ servers = [
+ Dalli::Server.new("localhost:19191"),
+ ]
+ ring = Dalli::Ring.new(servers, {})
+ memcached(19191) do |mc|
+ ring = mc.send(:ring)
+ assert_equal ring.servers.first.port, ring.server_for_key('test').port
+ end
+ end
+ end
+
+ describe 'containing multiple servers' do
+ it "raise correctly when no server is alive" do
+ servers = [
+ Dalli::Server.new("localhost:12345"),
+ Dalli::Server.new("localhost:12346"),
+ ]
+ ring = Dalli::Ring.new(servers, {})
+ assert_raises Dalli::RingError, :message => "No server available" do
+ ring.server_for_key('test')
+ end
+ end
+
+ it "return an alive server when at least one is alive" do
+ servers = [
+ Dalli::Server.new("localhost:12346"),
+ Dalli::Server.new("localhost:19191"),
+ ]
+ ring = Dalli::Ring.new(servers, {})
+ memcached(19191) do |mc|
+ ring = mc.send(:ring)
+ assert_equal ring.servers.first.port, ring.server_for_key('test').port
+ end
+ end
+ end
+
+ it 'detect when a dead server is up again' do
+ memcached(19997) do
+ down_retry_delay = 0.5
+ dc = Dalli::Client.new(['localhost:19997', 'localhost:19998'], :down_retry_delay => down_retry_delay)
+ assert_equal 1, dc.stats.values.compact.count
+
+ memcached(19998) do
+ assert_equal 2, dc.stats.values.compact.count
+ end
+ end
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_sasl.rb b/lib/dalli-2.7.2/test/test_sasl.rb
new file mode 100644
index 000000000..eca91c6a2
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_sasl.rb
@@ -0,0 +1,110 @@
+require 'helper'
+
+describe 'Sasl' do
+
+ # https://github.com/seattlerb/minitest/issues/298
+ def self.xit(msg, &block)
+ end
+
+ describe 'a server requiring authentication' do
+ before do
+ @server = mock()
+ @server.stubs(:request).returns(true)
+ @server.stubs(:weight).returns(1)
+ @server.stubs(:hostname).returns("localhost")
+ @server.stubs(:port).returns("19124")
+ end
+
+ describe 'without authentication credentials' do
+ before do
+ ENV['MEMCACHE_USERNAME'] = 'foo'
+ ENV['MEMCACHE_PASSWORD'] = 'wrongpwd'
+ end
+
+ after do
+ ENV['MEMCACHE_USERNAME'] = nil
+ ENV['MEMCACHE_PASSWORD'] = nil
+ end
+
+ it 'provide one test that passes' do
+ assert true
+ end
+
+ it 'gracefully handle authentication failures' do
+ memcached(19124, '-S') do |dc|
+ assert_error Dalli::DalliError, /32/ do
+ dc.set('abc', 123)
+ end
+ end
+ end
+ end
+
+ it 'fail SASL authentication with wrong options' do
+ memcached(19124, '-S') do |dc|
+ dc = Dalli::Client.new('localhost:19124', :username => 'foo', :password => 'wrongpwd')
+ assert_error Dalli::DalliError, /32/ do
+ dc.set('abc', 123)
+ end
+ end
+ end
+
+ # OSX: Create a SASL user for the memcached application like so:
+ #
+ # saslpasswd2 -a memcached -c testuser
+ #
+ # with password 'testtest'
+ describe 'in an authenticated environment' do
+ before do
+ ENV['MEMCACHE_USERNAME'] = 'testuser'
+ ENV['MEMCACHE_PASSWORD'] = 'testtest'
+ end
+
+ after do
+ ENV['MEMCACHE_USERNAME'] = nil
+ ENV['MEMCACHE_PASSWORD'] = nil
+ end
+
+ xit 'pass SASL authentication' do
+ memcached(19124, '-S') do |dc|
+ # I get "Dalli::DalliError: Error authenticating: 32" in OSX
+ # but SASL works on Heroku servers. YMMV.
+ assert_equal true, dc.set('abc', 123)
+ assert_equal 123, dc.get('abc')
+ results = dc.stats
+ assert_equal 1, results.size
+ assert_equal 38, results.values.first.size
+ end
+ end
+ end
+
+ xit 'pass SASL authentication with options' do
+ memcached(19124, '-S') do |dc|
+ dc = Dalli::Client.new('localhost:19124', :username => 'testuser', :password => 'testtest')
+ # I get "Dalli::DalliError: Error authenticating: 32" in OSX
+ # but SASL works on Heroku servers. YMMV.
+ assert_equal true, dc.set('abc', 123)
+ assert_equal 123, dc.get('abc')
+ results = dc.stats
+ assert_equal 1, results.size
+ assert_equal 38, results.values.first.size
+ end
+ end
+
+ it 'pass SASL as URI' do
+ Dalli::Server.expects(:new).with("localhost:19124",
+ :username => "testuser", :password => "testtest").returns(@server)
+ dc = Dalli::Client.new('memcached://testuser:testtest@localhost:19124')
+ dc.flush_all
+ end
+
+ it 'pass SASL as ring of URIs' do
+ Dalli::Server.expects(:new).with("localhost:19124",
+ :username => "testuser", :password => "testtest").returns(@server)
+ Dalli::Server.expects(:new).with("otherhost:19125",
+ :username => "testuser2", :password => "testtest2").returns(@server)
+ dc = Dalli::Client.new(['memcached://testuser:testtest@localhost:19124',
+ 'memcached://testuser2:testtest2@otherhost:19125'])
+ dc.flush_all
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_serializer.rb b/lib/dalli-2.7.2/test/test_serializer.rb
new file mode 100644
index 000000000..e8118f249
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_serializer.rb
@@ -0,0 +1,30 @@
+# encoding: utf-8
+require 'helper'
+require 'json'
+require 'memcached_mock'
+
+describe 'Serializer' do
+
+ it 'default to Marshal' do
+ memcached_kill(29198) do |dc|
+ memcache = Dalli::Client.new('127.0.0.1:29198')
+ memcache.set 1,2
+ assert_equal Marshal, memcache.instance_variable_get('@ring').servers.first.serializer
+ end
+ end
+
+ it 'support a custom serializer' do
+ memcached_kill(29198) do |dc|
+ memcache = Dalli::Client.new('127.0.0.1:29198', :serializer => JSON)
+ memcache.set 1,2
+ begin
+ assert_equal JSON, memcache.instance_variable_get('@ring').servers.first.serializer
+
+ memcached(19128) do |newdc|
+ assert newdc.set("json_test", {"foo" => "bar"})
+ assert_equal({"foo" => "bar"}, newdc.get("json_test"))
+ end
+ end
+ end
+ end
+end
diff --git a/lib/dalli-2.7.2/test/test_server.rb b/lib/dalli-2.7.2/test/test_server.rb
new file mode 100644
index 000000000..547587176
--- /dev/null
+++ b/lib/dalli-2.7.2/test/test_server.rb
@@ -0,0 +1,80 @@
+require 'helper'
+
+describe Dalli::Server do
+ describe 'hostname parsing' do
+ it 'handles no port or weight' do
+ s = Dalli::Server.new('localhost')
+ assert_equal 'localhost', s.hostname
+ assert_equal 11211, s.port
+ assert_equal 1, s.weight
+ end
+
+ it 'handles a port, but no weight' do
+ s = Dalli::Server.new('localhost:11212')
+ assert_equal 'localhost', s.hostname
+ assert_equal 11212, s.port
+ assert_equal 1, s.weight
+ end
+
+ it 'handles a port and a weight' do
+ s = Dalli::Server.new('localhost:11212:2')
+ assert_equal 'localhost', s.hostname
+ assert_equal 11212, s.port
+ assert_equal 2, s.weight
+ end
+
+ it 'handles ipv4 addresses' do
+ s = Dalli::Server.new('127.0.0.1')
+ assert_equal '127.0.0.1', s.hostname
+ assert_equal 11211, s.port
+ assert_equal 1, s.weight
+ end
+
+ it 'handles ipv6 addresses' do
+ s = Dalli::Server.new('[::1]')
+ assert_equal '::1', s.hostname
+ assert_equal 11211, s.port
+ assert_equal 1, s.weight
+ end
+
+ it 'handles ipv6 addresses with port' do
+ s = Dalli::Server.new('[::1]:11212')
+ assert_equal '::1', s.hostname
+ assert_equal 11212, s.port
+ assert_equal 1, s.weight
+ end
+
+ it 'handles ipv6 addresses with port and weight' do
+ s = Dalli::Server.new('[::1]:11212:2')
+ assert_equal '::1', s.hostname
+ assert_equal 11212, s.port
+ assert_equal 2, s.weight
+ end
+
+ it 'handles a FQDN' do
+ s = Dalli::Server.new('my.fqdn.com')
+ assert_equal 'my.fqdn.com', s.hostname
+ assert_equal 11211, s.port
+ assert_equal 1, s.weight
+ end
+
+ it 'handles a FQDN with port and weight' do
+ s = Dalli::Server.new('my.fqdn.com:11212:2')
+ assert_equal 'my.fqdn.com', s.hostname
+ assert_equal 11212, s.port
+ assert_equal 2, s.weight
+ end
+ end
+
+ describe 'ttl translation' do
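+ # memcached treats expiry values above 30 days as absolute unix timestamps, so larger TTLs are converted by the client.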
+ it 'does not translate ttls under 30 days' do
+ s = Dalli::Server.new('localhost')
+ assert_equal s.send(:sanitize_ttl, 30*24*60*60), 30*24*60*60
+ end
+
+ it 'translates ttls over 30 days into timestamps' do
+ s = Dalli::Server.new('localhost')
+ assert_equal s.send(:sanitize_ttl, 30*24*60*60 + 1), Time.now.to_i + 30*24*60*60+1
+ end
+ end
+end
diff --git a/lib/redmine/notifiable.rb b/lib/redmine/notifiable.rb
index 42a81980f..26ad31966 100644
--- a/lib/redmine/notifiable.rb
+++ b/lib/redmine/notifiable.rb
@@ -20,9 +20,7 @@ module Redmine
notifications << Notifiable.new('message_posted')
notifications << Notifiable.new('wiki_content_added')
notifications << Notifiable.new('wiki_content_updated')
- notifications << Notifiable.new('forum_add')
- notifications << Notifiable.new('forum_message_added', 'forum_add')
- notifications
+ notifications << Notifiable.new('forum_message_added')
end
end
end