From c0d8f7f95690369362b2d068319dbd57caeb4632 Mon Sep 17 00:00:00 2001
From: densetsu9 <78060710+densetsu9@users.noreply.github.com>
Date: Fri, 2 Aug 2024 14:56:11 -0400
Subject: [PATCH 1/2] Moved take home work from old repository
---
.gitignore | 1 +
DENZIL.md | 62 +
Gemfile | 3 +
Gemfile.lock | 11 +
app/assets/javascripts/likes.js | 37 +
app/controllers/pages_controller.rb | 26 +
app/models/like.rb | 23 +
app/models/user.rb | 31 +-
app/repos/hacker_news_scraper.rb | 81 +
app/repos/home_page_collator.rb | 52 +
app/repos/like_repo.rb | 62 +
app/repos/liked_page_collator.rb | 48 +
app/views/pages/_header.html.erb | 17 +
app/views/pages/_story_row.html.erb | 16 +
app/views/pages/home.html.erb | 10 +
app/views/pages/liked_index.html.erb | 11 +
config/application.rb | 1 +
config/environments/test.rb | 2 +-
config/initializers/devise.rb | 2 +-
config/routes.rb | 5 +-
.../20240801001753_create_like_table.rb | 11 +
db/schema.rb | 12 +-
denzil.patch | 1686 +++++++++++++++++
.../vcr_cassettes/story_details_41119080.yml | 43 +
.../vcr_cassettes/story_details_41119443.yml | 46 +
.../vcr_cassettes/story_details_multi.yml | 126 ++
fixtures/vcr_cassettes/top_stories.yml | 42 +
.../vcr_cassettes/top_story_full_refresh.yml | 164 ++
lib/tasks/auto_annotate_models.rake | 59 +
spec/models/like_spec.rb | 31 +
spec/models/user_spec.rb | 44 +-
spec/rails_helper.rb | 26 +-
spec/repos/hacker_news_scraper_spec.rb | 154 ++
spec/repos/like_repo_spec.rb | 120 ++
34 files changed, 3047 insertions(+), 18 deletions(-)
create mode 100644 DENZIL.md
create mode 100644 app/assets/javascripts/likes.js
create mode 100644 app/models/like.rb
create mode 100644 app/repos/hacker_news_scraper.rb
create mode 100644 app/repos/home_page_collator.rb
create mode 100644 app/repos/like_repo.rb
create mode 100644 app/repos/liked_page_collator.rb
create mode 100644 app/views/pages/_header.html.erb
create mode 100644 app/views/pages/_story_row.html.erb
create mode 100644 app/views/pages/liked_index.html.erb
create mode 100644 db/migrate/20240801001753_create_like_table.rb
create mode 100644 denzil.patch
create mode 100644 fixtures/vcr_cassettes/story_details_41119080.yml
create mode 100644 fixtures/vcr_cassettes/story_details_41119443.yml
create mode 100644 fixtures/vcr_cassettes/story_details_multi.yml
create mode 100644 fixtures/vcr_cassettes/top_stories.yml
create mode 100644 fixtures/vcr_cassettes/top_story_full_refresh.yml
create mode 100644 lib/tasks/auto_annotate_models.rake
create mode 100644 spec/models/like_spec.rb
create mode 100644 spec/repos/hacker_news_scraper_spec.rb
create mode 100644 spec/repos/like_repo_spec.rb
diff --git a/.gitignore b/.gitignore
index 82701fed..f38d4ede 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,3 +17,4 @@
/yarn-error.log
.byebug_history
+.vscode
diff --git a/DENZIL.md b/DENZIL.md
new file mode 100644
index 00000000..8019b37b
--- /dev/null
+++ b/DENZIL.md
@@ -0,0 +1,62 @@
+# Denzil Kriekenbeek take home exercise
+
+## Installation problems on Apple M1 MacBook Air, Sonoma 14.4.1 (23E224)
+
+### nio4r gem failing bundle install:
+ ```
+ gem install nio4r -v 2.5.8 -- --with-cflags="-Wno-incompatible-pointer-types"
+ ```
+
+### Postgres gem install failing bundle install due to Postgres not being installed:
+ ```
+ brew install postgresql
+ brew services start postgresql@14
+ ```
+
+### Wrong version of OpenSSL being used when building Ruby 3.1.2 with ruby-install
+ Add to .zshrc
+ ```
+ export PATH="/opt/homebrew/opt/openssl@1.1/bin:$PATH"
+ export LIBRARY_PATH="$LIBRARY_PATH:/opt/homebrew/opt/openssl@1.1/lib/"
+ export RUBY_CONFIGURE_OPTS="--with-openssl-dir=$(brew --prefix openssl@1.1)"
+ ```
+ `ruby-install ruby-3.1.2`
+
+## Initial Impressions:
+- The Hacker News API requires N+1 requests to populate a page, we'll have to do some significant caching to make this tolerable.
+
+- 1st step would be to refresh my memory by reading
+https://guides.rubyonrails.org/caching_with_rails.html
+
+- Might need to enable development (in memory) cache with bin/rails dev:cache, but this would mean production would need an alternate memory store (memcached?). No need to decide now.
+
+## Random thought livestream:
+
+- Any time an API is involved, I reach for the VCR gem; it mocks out external API calls, allowing for deterministic unit tests. Incidentally, while reading the docs, noticed that the supported Typhoeus library can handle parallel requests. Seems applicable to this problem.
+
+- There's a nagging deprecation warning that seems easily fixable.
+
+- Login/logout is the first requirement, and I see the devise gem in the gemfile, so let's get that working next.
+
+- Heh, didn't realize the User table already had all the devise columns until I went to create a migration. *facepalm* As an aside, I was going to add the annotate gem for easy schema reference.
+
+- Now that we can guarantee that users are logged in, the next step is to retrieve Hacker News entries via its API. Will brute force the N+1 request first, then iterate from there.
+
+- My plan is to create a "repository" to abstract away all this API work. But in good TDD practice, I'll start by writing a failing test that lets me design my interface.
+
+- Hmm, the API has an "ask" item that is unhelpfully also typed as a "story". The only difference I see is that an "ask" has a text field, where a real "story" does not. But I suppose for this exercise we only care about titles.
+
+- Whoops, neglected this file: Got my brute force scraper working. Piped that output to the home page. Added a like button to each row... Next step is to make it do something, which involves creating a table for this data to live in.
+
+- I added low-level caching for the scrape results. The likes would have to be dynamic, so there would have to be some collation of the data sets. Hence the introduction of my collator classes. I realized that the home page's cache expiration would have to be on the order of minutes, whereas each individual story details cache could live for days. As a result, every piece of information should only be loaded once, keeping our bandwidth low at the expense of some cache space.
+
+- I was considering doing some partial render caching as well, but I also wanted to submit it before EOW :)
+
+- I'm also glad to have included the typhoeus gem to do parallel fetches, which should prevent a heavy waterfall on initial page load.
+
+
+## Final Thoughts:
+- This was a really fun exercise! I haven't used Rails 7 before, so I took the opportunity to acquaint myself with how Stimulus worked. I'm happy with the resulting "SPA"-like experience. I think you'll see that I'm very test driven, and I like to build facades of abstraction that make making tweaks later easier. After building all the tools I needed for the home page, I was able to build the liked page in a few minutes. Thank you, and I hope to hear from you soon!
+
+Sincerely,
+-Denzil
diff --git a/Gemfile b/Gemfile
index 5a8ffc43..2eba0d2a 100644
--- a/Gemfile
+++ b/Gemfile
@@ -2,6 +2,7 @@ source 'https://rubygems.org'
ruby File.read('.ruby-version').chomp
+gem 'annotate', group: :development # reminds us of model schemas
gem 'byebug', platforms: [:mri, :mingw, :x64_mingw], group: [:development, :test]
gem 'capybara', group: [:development, :test]
gem 'coffee-rails'
@@ -17,6 +18,8 @@ gem 'sass-rails'
gem 'selenium-webdriver', group: [:development, :test]
gem 'spring', group: :development
gem 'turbolinks'
+gem 'typhoeus' # parallelizes http requests
gem 'tzinfo-data', platforms: [:mingw, :mswin, :x64_mingw, :jruby]
gem 'uglifier'
+gem "vcr", group: :test # mocks http requests
gem 'web-console', group: :development
diff --git a/Gemfile.lock b/Gemfile.lock
index 14ec6457..72789b6c 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -68,6 +68,9 @@ GEM
tzinfo (~> 2.0)
addressable (2.8.1)
public_suffix (>= 2.0.2, < 6.0)
+ annotate (3.2.0)
+ activerecord (>= 3.2, < 8.0)
+ rake (>= 10.4, < 14.0)
bcrypt (3.1.18)
bindex (0.8.1)
builder (3.2.4)
@@ -101,6 +104,8 @@ GEM
diff-lcs (1.5.0)
digest (3.1.0)
erubi (1.11.0)
+ ethon (0.16.0)
+ ffi (>= 1.15.0)
execjs (2.8.1)
ffi (1.15.5)
globalid (1.0.0)
@@ -239,10 +244,13 @@ GEM
turbolinks (5.2.1)
turbolinks-source (~> 5.2)
turbolinks-source (5.2.0)
+ typhoeus (1.4.1)
+ ethon (>= 0.9.0)
tzinfo (2.0.5)
concurrent-ruby (~> 1.0)
uglifier (4.2.0)
execjs (>= 0.3.0, < 3)
+ vcr (6.2.0)
warden (1.2.9)
rack (>= 2.0.9)
web-console (4.2.0)
@@ -262,6 +270,7 @@ PLATFORMS
ruby
DEPENDENCIES
+ annotate
byebug
capybara
coffee-rails
@@ -277,8 +286,10 @@ DEPENDENCIES
selenium-webdriver
spring
turbolinks
+ typhoeus
tzinfo-data
uglifier
+ vcr
web-console
RUBY VERSION
diff --git a/app/assets/javascripts/likes.js b/app/assets/javascripts/likes.js
new file mode 100644
index 00000000..19cd140b
--- /dev/null
+++ b/app/assets/javascripts/likes.js
@@ -0,0 +1,37 @@
+
+class Likes {
+ constructor() {
+ document.addEventListener("ajax:success", this.ajax_listener);
+ }
+
+ ajax_listener = (event) => {
+ const [data, _status, _xhr] = event.detail;
+ const { cmd, ...json } = data;
+
+ switch (data.cmd) {
+ case "update_story_likes":
+ const { story_id, likers } = json;
+ return this.update_story_likes(story_id, likers);
+ }
+ }
+
+ update_story_likes(story_id, likers) {
+ const storyLikeElementId = "story_likes_" + story_id;
+
+ let newContents = "";
+ if (likers.length > 0) {
+ newContents = "Liked by: "
+ newContents += likers
+ .map(liker => liker.name)
+ .join(", ")
+ }
+
+ this.replace_element(storyLikeElementId, newContents);
+ }
+
+ replace_element(elementId, newContents) {
+ document.getElementById(elementId).innerHTML = newContents;
+ }
+}
+
+new Likes();
diff --git a/app/controllers/pages_controller.rb b/app/controllers/pages_controller.rb
index ce3bf586..0cf85726 100644
--- a/app/controllers/pages_controller.rb
+++ b/app/controllers/pages_controller.rb
@@ -1,2 +1,28 @@
class PagesController < ApplicationController
+ before_action :authenticate_user!
+
+ MAX_STORIES = 20
+
+ def home
+ home_page_data = HomePageCollator.call(limit: MAX_STORIES)
+ render locals: home_page_data
+ end
+
+ def liked_index
+ liked_page_data = LikedPageCollator.call
+ render locals: liked_page_data
+ end
+
+ def like_story
+ story_id = params.require(:story_id)
+ LikeRepo.new(current_user.id)
+ .toggle_like(story_id)
+
+ likers = LikeRepo.fetch_likes(story_id)
+ render json: {
+ cmd: :update_story_likes,
+ story_id: story_id,
+ likers: likers
+ }
+ end
end
diff --git a/app/models/like.rb b/app/models/like.rb
new file mode 100644
index 00000000..8ae5d3b5
--- /dev/null
+++ b/app/models/like.rb
@@ -0,0 +1,23 @@
+# == Schema Information
+#
+# Table name: likes
+#
+# id :bigint not null, primary key
+# active :boolean default(FALSE)
+# created_at :datetime not null
+# updated_at :datetime not null
+# story_id :integer
+# user_id :bigint
+#
+# Indexes
+#
+# index_likes_on_user_id (user_id)
+# index_likes_on_user_id_and_story_id (user_id,story_id) UNIQUE
+#
+class Like < ApplicationRecord
+ belongs_to :user
+
+ def user_name
+ user.full_name
+ end
+end
diff --git a/app/models/user.rb b/app/models/user.rb
index b2091f9a..707177de 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -1,6 +1,35 @@
+# == Schema Information
+#
+# Table name: users
+#
+# id :bigint not null, primary key
+# current_sign_in_at :datetime
+# current_sign_in_ip :inet
+# email :string default(""), not null
+# encrypted_password :string default(""), not null
+# first_name :string
+# last_name :string
+# last_sign_in_at :datetime
+# last_sign_in_ip :inet
+# remember_created_at :datetime
+# reset_password_sent_at :datetime
+# reset_password_token :string
+# sign_in_count :integer default(0), not null
+# created_at :datetime not null
+# updated_at :datetime not null
+#
+# Indexes
+#
+# index_users_on_email (email) UNIQUE
+# index_users_on_reset_password_token (reset_password_token) UNIQUE
+#
class User < ApplicationRecord
# Include default devise modules. Others available are:
# :confirmable, :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
- :recoverable, :rememberable, :trackable, :validatable
+ :recoverable, :rememberable, :trackable, :validatable
+
+ def full_name
+ "#{first_name} #{last_name}"
+ end
end
diff --git a/app/repos/hacker_news_scraper.rb b/app/repos/hacker_news_scraper.rb
new file mode 100644
index 00000000..88102aea
--- /dev/null
+++ b/app/repos/hacker_news_scraper.rb
@@ -0,0 +1,81 @@
+class HackerNewsScraper
+ API_ROOT = "https://hacker-news.firebaseio.com/v0/"
+
+ def self.retrieve_top_stories(
+ limit: nil,
+ cache_expiry: 3.minutes,
+ relevant_fields: []
+ )
+ Rails.cache.fetch(
+ :hacker_news_top_stories,
+ expires_in: cache_expiry
+ ) do
+ scraper = new
+ story_ids = scraper.fetch_top_story_ids
+ limit ||= story_ids.size
+
+ scraper
+ .fetch_stories(story_ids.first(limit))
+ .map do |story_details|
+ story_details.slice(*relevant_fields)
+ end
+ end
+ end
+
+ def self.retrieve_story_details(
+ story_id:,
+ cache_expiry: 1.day,
+ relevant_fields: []
+ )
+ Rails.cache.fetch(
+ [:story_details, story_id],
+ expires_in: cache_expiry
+ ) do
+ story_details = new.fetch_story_details(story_id)
+ story_details.slice(*relevant_fields)
+ end
+ end
+
+ def fetch_top_story_ids
+ end_point = "topstories.json"
+ get(end_point)
+ end
+
+ def fetch_story_details(story_id)
+ end_point = story_endpoint(story_id)
+ get(end_point)
+ .symbolize_keys
+ end
+
+ def fetch_stories(story_ids)
+ hydra = Typhoeus::Hydra.new
+ requests = build_hydra_requests(story_ids) do |request|
+ hydra.queue(request)
+ end
+ hydra.run
+
+ requests.map do |request|
+ JSON.parse(request.response.body)
+ .symbolize_keys
+ end
+ end
+
+ private
+
+ def get(api_endpoint)
+ request = Typhoeus.get(API_ROOT + api_endpoint)
+ JSON.parse(request.response_body)
+ end
+
+ def story_endpoint(story_id)
+ "item/#{story_id}.json"
+ end
+
+ def build_hydra_requests(story_ids, &block)
+ story_ids.map do |story_id|
+ api_end_point = API_ROOT + story_endpoint(story_id)
+ Typhoeus::Request.new(api_end_point)
+ .tap { |req| block.call(req) }
+ end
+ end
+end
diff --git a/app/repos/home_page_collator.rb b/app/repos/home_page_collator.rb
new file mode 100644
index 00000000..2b399896
--- /dev/null
+++ b/app/repos/home_page_collator.rb
@@ -0,0 +1,52 @@
+class HomePageCollator
+ def self.call(limit: nil, cache_expiry: nil)
+ repo = new
+ repo.limit = limit if limit.present?
+ repo.cache_expiry = cache_expiry if cache_expiry.present?
+ repo.execute
+ end
+
+ attr_accessor :limit, :cache_expiry
+
+ def initialize
+ @scraper = HackerNewsScraper
+ @like_repo = LikeRepo
+ @limit = nil
+ @cache_expiry = 3.minutes # top stories will be in constant flux
+ end
+
+ def execute
+ story_data = scrape_news_data
+ story_ids = extract_story_ids(story_data)
+ liker_data = lookup_likes(story_ids)
+
+ {
+ story_data: story_data,
+ liker_data: liker_data
+ }
+ end
+
+ private
+
+ attr_reader :scraper, :like_repo
+
+ def scrape_news_data
+ scraper.retrieve_top_stories(
+ limit: limit,
+ cache_expiry: cache_expiry,
+ relevant_fields: [
+ :id,
+ :title,
+ :url
+ ]
+ )
+ end
+
+ def extract_story_ids(scraped_data)
+ scraped_data.map { |story| story[:id] }
+ end
+
+ def lookup_likes(story_ids)
+ like_repo.fetch_grouped_likes(story_ids)
+ end
+end
diff --git a/app/repos/like_repo.rb b/app/repos/like_repo.rb
new file mode 100644
index 00000000..f5a49b6c
--- /dev/null
+++ b/app/repos/like_repo.rb
@@ -0,0 +1,62 @@
+class LikeRepo
+ def initialize(user_id)
+ @user_id = user_id
+ end
+
+ def toggle_like(story_id)
+ like = Like.find_or_create_by(
+ story_id: story_id,
+ user_id: user_id
+ )
+ like.toggle!(:active)
+ self
+ end
+
+ def self.fetch_grouped_likes(story_ids = [])
+ likes = story_ids.blank? ?
+ fetch_all_likes :
+ fetch_likes(story_ids)
+
+ likes
+ .group_by { |like| like[:story_id] }
+ .transform_values do |likes|
+ likes
+ .map { |like| like[:name] }
+ .join(", ")
+ end
+ end
+
+ def self.fetch_likes(story_id)
+ likes = Like
+ .includes(:user)
+ .where(
+ story_id: story_id,
+ active: true
+ )
+
+ likes.map { |like| format_like(like) }
+ end
+
+ def self.fetch_all_likes
+ likes = Like
+ .includes(:user)
+ .where(
+ active: true
+ )
+ .order(id: :desc)
+
+ likes.map { |like| format_like(like) }
+ end
+
+ def self.format_like(like)
+ {
+ story_id: like.story_id,
+ user_id: like.user_id,
+ name: like.user_name
+ }
+ end
+
+ private
+
+ attr_reader :user_id
+end
diff --git a/app/repos/liked_page_collator.rb b/app/repos/liked_page_collator.rb
new file mode 100644
index 00000000..9f2f1cfb
--- /dev/null
+++ b/app/repos/liked_page_collator.rb
@@ -0,0 +1,48 @@
+class LikedPageCollator
+ def self.call(cache_expiry: nil)
+ repo = new
+ repo.cache_expiry = cache_expiry if cache_expiry.present?
+ repo.execute
+ end
+
+ attr_accessor :cache_expiry
+
+ def initialize
+ @scraper = HackerNewsScraper
+ @like_repo = LikeRepo
+ @cache_expiry = 1.day # story details won't change often, if at all
+ end
+
+ def execute
+ liker_data = lookup_all_likes
+ story_ids = liker_data.keys
+ scraped_data = scrape_news_data(story_ids)
+
+ {
+ story_data: scraped_data,
+ liker_data: liker_data
+ }
+ end
+
+ private
+
+ attr_reader :scraper, :like_repo
+
+ def lookup_all_likes
+ like_repo.fetch_grouped_likes
+ end
+
+ def scrape_news_data(story_ids)
+ story_ids.map do |story_id|
+ scraper.retrieve_story_details(
+ story_id: story_id,
+ cache_expiry: cache_expiry,
+ relevant_fields: [
+ :id,
+ :title,
+ :url
+ ]
+ )
+ end
+ end
+end
diff --git a/app/views/pages/_header.html.erb b/app/views/pages/_header.html.erb
new file mode 100644
index 00000000..b1fff559
--- /dev/null
+++ b/app/views/pages/_header.html.erb
@@ -0,0 +1,17 @@
+
+
+ <%= link_to "Top Stories", "/" %>
+
+
+ <%= link_to "Liked Stories", "/liked" %>
+
+
+ <%=
+ button_to(
+ "Sign out from #{current_user.email}",
+ destroy_user_session_path,
+ method: :delete
+ )
+ %>
+
+
diff --git a/app/views/pages/_story_row.html.erb b/app/views/pages/_story_row.html.erb
new file mode 100644
index 00000000..eacde18b
--- /dev/null
+++ b/app/views/pages/_story_row.html.erb
@@ -0,0 +1,16 @@
+<% likers.prepend("Liked by: ") if likers.present? %>
+
+
+ <%=
+ button_to "👍",
+ like_story_path(story_id: story[:id]),
+ {
+ method: :post,
+ remote: true,
+ style: "background-color:#44a; cursor: pointer;",
+ }
+ %>
+
<%= story[:title] %>
+
<%= likers %>
+
+
diff --git a/app/views/pages/home.html.erb b/app/views/pages/home.html.erb
index 8bfd8294..bb984a0a 100644
--- a/app/views/pages/home.html.erb
+++ b/app/views/pages/home.html.erb
@@ -1 +1,11 @@
+<%= render "header" %>
Welcome to Top News
+
+<% story_data.each do |story_details| %>
+ <%= render(
+ "story_row",
+ story: story_details,
+ likers: liker_data[story_details[:id]]
+ )%>
+<% end %>
+
diff --git a/app/views/pages/liked_index.html.erb b/app/views/pages/liked_index.html.erb
new file mode 100644
index 00000000..94352193
--- /dev/null
+++ b/app/views/pages/liked_index.html.erb
@@ -0,0 +1,11 @@
+<%= render "header" %>
+All Liked Stories
+
+<% story_data.each do |story_details| %>
+ <%= render(
+ "story_row",
+ story: story_details,
+ likers: liker_data[story_details[:id]]
+ )%>
+<% end %>
+
diff --git a/config/application.rb b/config/application.rb
index dab4cec6..f2b7546e 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -10,6 +10,7 @@ module Topnews
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.1
+ config.active_record.legacy_connection_handling = false
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
diff --git a/config/environments/test.rb b/config/environments/test.rb
index 8e5cbde5..7b6dc4c0 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -5,7 +5,7 @@
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
- config.cache_classes = true
+ config.cache_classes = false
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
diff --git a/config/initializers/devise.rb b/config/initializers/devise.rb
index 962d4a7c..235093e8 100644
--- a/config/initializers/devise.rb
+++ b/config/initializers/devise.rb
@@ -244,7 +244,7 @@
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
- # config.navigational_formats = ['*/*', :html]
+ config.navigational_formats = ['*/*', :html, :turbo_stream]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
diff --git a/config/routes.rb b/config/routes.rb
index c12ef082..5163a68a 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -1,4 +1,7 @@
Rails.application.routes.draw do
devise_for :users
- root to: 'pages#home'
+ root to: "pages#home"
+
+ get "liked", to: "pages#liked_index", as: :liked_index
+ post "like/:story_id", to: "pages#like_story", as: :like_story
end
diff --git a/db/migrate/20240801001753_create_like_table.rb b/db/migrate/20240801001753_create_like_table.rb
new file mode 100644
index 00000000..3f99c6f9
--- /dev/null
+++ b/db/migrate/20240801001753_create_like_table.rb
@@ -0,0 +1,11 @@
+class CreateLikeTable < ActiveRecord::Migration[7.0]
+ def change
+ create_table :likes do |t|
+ t.references :user
+ t.integer :story_id
+ t.index [:user_id, :story_id], unique: true
+ t.boolean :active, default: false
+ t.timestamps
+ end
+ end
+end
diff --git a/db/schema.rb b/db/schema.rb
index acc34f3b..3ec60059 100644
--- a/db/schema.rb
+++ b/db/schema.rb
@@ -10,10 +10,20 @@
#
# It's strongly recommended that you check this file into your version control system.
-ActiveRecord::Schema[7.0].define(version: 2018_02_28_212101) do
+ActiveRecord::Schema[7.0].define(version: 2024_08_01_001753) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
+ create_table "likes", force: :cascade do |t|
+ t.bigint "user_id"
+ t.integer "story_id"
+ t.boolean "active", default: false
+ t.datetime "created_at", null: false
+ t.datetime "updated_at", null: false
+ t.index ["user_id", "story_id"], name: "index_likes_on_user_id_and_story_id", unique: true
+ t.index ["user_id"], name: "index_likes_on_user_id"
+ end
+
create_table "users", force: :cascade do |t|
t.string "first_name"
t.string "last_name"
diff --git a/denzil.patch b/denzil.patch
new file mode 100644
index 00000000..b3668cb8
--- /dev/null
+++ b/denzil.patch
@@ -0,0 +1,1686 @@
+diff --git a/.gitignore b/.gitignore
+index 82701fe..f38d4ed 100644
+--- a/.gitignore
++++ b/.gitignore
+@@ -17,3 +17,4 @@
+ /yarn-error.log
+
+ .byebug_history
++.vscode
+diff --git a/DENZIL.md b/DENZIL.md
+new file mode 100644
+index 0000000..8019b37
+--- /dev/null
++++ b/DENZIL.md
+@@ -0,0 +1,62 @@
++# Denzil Kriekenbeek take home exercise
++
++## Installation problems on Apple M1 MacBook Air, Sonoma 14.4.1 (23E224)
++
++### nio4r gem failing bundle install:
++ ```
++ gem install nio4r -v 2.5.8 -- --with-cflags="-Wno-incompatible-pointer-types"
++ ```
++
++### Postgres gem install failing bundle install due to Postgres not being installed:
++ ```
++ brew install postgresql
++ brew services start postgresql@14
++ ```
++
++### Wrong version of OpenSSL being used when building Ruby 3.1.2 with ruby-install
++ Add to .zshrc
++ ```
++ export PATH="/opt/homebrew/opt/openssl@1.1/bin:$PATH"
++ export LIBRARY_PATH="$LIBRARY_PATH:/opt/homebrew/opt/openssl@1.1/lib/"
++ export RUBY_CONFIGURE_OPTS="--with-openssl-dir=$(brew --prefix openssl@1.1)"
++ ```
++ `ruby-install ruby-3.1.2`
++
++## Initial Impressions:
++- The Hacker News API requires N+1 requests to populate a page, we'll have to do some significant caching to make this tolerable.
++
++- 1st step would be to refresh my memory by reading
++https://guides.rubyonrails.org/caching_with_rails.html
++
++- Might need to enable development (in memory) cache with bin/rails dev:cache, but this would mean production would need an alternate memory store (memcached?). No need to decide now.
++
++## Random thought livestream:
++
++- Any time an API is involved, I reach for the VCR gem; it mocks out external API calls, allowing for deterministic unit tests. Incidentally, while reading the docs, noticed that the supported Typhoeus library can handle parallel requests. Seems applicable to this problem.
++
++- There's a nagging deprecation warning that seems easily fixable.
++
++- Login/logout is the first requirement, and I see the devise gem in the gemfile, so let's get that working next.
++
++- Heh, didn't realize the User table already had all the devise columns until I went to create a migration. *facepalm* As an aside, I was going to add the annotate gem for easy schema reference.
++
++- Now that we can guarantee that users are logged in, the next step is to retrieve Hacker News entries via its API. Will brute force the N+1 request first, then iterate from there.
++
++- My plan is to create a "repository" to abstract away all this API work. But in good TDD practice, I'll start by writing a failing test that lets me design my interface.
++
++- Hmm, the API has an "ask" item that is unhelpfully also typed as a "story". The only difference I see is that an "ask" has a text field, where a real "story" does not. But I suppose for this exercise we only care about titles.
++
++- Whoops, neglected this file: Got my brute force scraper working. Piped that output to the home page. Added a like button to each row... Next step is to make it do something, which involves creating a table for this data to live in.
++
++- I added low-level caching for the scrape results. The likes would have to be dynamic, so there would have to be some collation of the data sets. Hence the introduction of my collator classes. I realized that the home page's cache expiration would have to be on the order of minutes, whereas each individual story details cache could live for days. As a result, every piece of information should only be loaded once, keeping our bandwidth low at the expense of some cache space.
++
++- I was considering doing some partial render caching as well, but I also wanted to submit it before EOW :)
++
++- I'm also glad to have included the typhoeus gem to do parallel fetches, which should prevent a heavy waterfall on initial page load.
++
++
++## Final Thoughts:
++- This was a really fun exercise! I haven't used Rails 7 before, so I took the opportunity to acquaint myself with how Stimulus worked. I'm happy with the resulting "SPA"-like experience. I think you'll see that I'm very test driven, and I like to build facades of abstraction that make making tweaks later easier. After building all the tools I needed for the home page, I was able to build the liked page in a few minutes. Thank you, and I hope to hear from you soon!
++
++Sincerely,
++-Denzil
+diff --git a/Gemfile b/Gemfile
+index 5a8ffc4..2eba0d2 100644
+--- a/Gemfile
++++ b/Gemfile
+@@ -2,6 +2,7 @@ source 'https://rubygems.org'
+
+ ruby File.read('.ruby-version').chomp
+
++gem 'annotate', group: :development # reminds us of model schemas
+ gem 'byebug', platforms: [:mri, :mingw, :x64_mingw], group: [:development, :test]
+ gem 'capybara', group: [:development, :test]
+ gem 'coffee-rails'
+@@ -17,6 +18,8 @@ gem 'sass-rails'
+ gem 'selenium-webdriver', group: [:development, :test]
+ gem 'spring', group: :development
+ gem 'turbolinks'
++gem 'typhoeus' # parallelizes http requests
+ gem 'tzinfo-data', platforms: [:mingw, :mswin, :x64_mingw, :jruby]
+ gem 'uglifier'
++gem "vcr", group: :test # mocks http requests
+ gem 'web-console', group: :development
+diff --git a/Gemfile.lock b/Gemfile.lock
+index 14ec645..72789b6 100644
+--- a/Gemfile.lock
++++ b/Gemfile.lock
+@@ -68,6 +68,9 @@ GEM
+ tzinfo (~> 2.0)
+ addressable (2.8.1)
+ public_suffix (>= 2.0.2, < 6.0)
++ annotate (3.2.0)
++ activerecord (>= 3.2, < 8.0)
++ rake (>= 10.4, < 14.0)
+ bcrypt (3.1.18)
+ bindex (0.8.1)
+ builder (3.2.4)
+@@ -101,6 +104,8 @@ GEM
+ diff-lcs (1.5.0)
+ digest (3.1.0)
+ erubi (1.11.0)
++ ethon (0.16.0)
++ ffi (>= 1.15.0)
+ execjs (2.8.1)
+ ffi (1.15.5)
+ globalid (1.0.0)
+@@ -239,10 +244,13 @@ GEM
+ turbolinks (5.2.1)
+ turbolinks-source (~> 5.2)
+ turbolinks-source (5.2.0)
++ typhoeus (1.4.1)
++ ethon (>= 0.9.0)
+ tzinfo (2.0.5)
+ concurrent-ruby (~> 1.0)
+ uglifier (4.2.0)
+ execjs (>= 0.3.0, < 3)
++ vcr (6.2.0)
+ warden (1.2.9)
+ rack (>= 2.0.9)
+ web-console (4.2.0)
+@@ -262,6 +270,7 @@ PLATFORMS
+ ruby
+
+ DEPENDENCIES
++ annotate
+ byebug
+ capybara
+ coffee-rails
+@@ -277,8 +286,10 @@ DEPENDENCIES
+ selenium-webdriver
+ spring
+ turbolinks
++ typhoeus
+ tzinfo-data
+ uglifier
++ vcr
+ web-console
+
+ RUBY VERSION
+diff --git a/app/assets/javascripts/likes.js b/app/assets/javascripts/likes.js
+new file mode 100644
+index 0000000..19cd140
+--- /dev/null
++++ b/app/assets/javascripts/likes.js
+@@ -0,0 +1,37 @@
++
++class Likes {
++ constructor() {
++ document.addEventListener("ajax:success", this.ajax_listener);
++ }
++
++ ajax_listener = (event) => {
++ const [data, _status, _xhr] = event.detail;
++ const { cmd, ...json } = data;
++
++ switch (data.cmd) {
++ case "update_story_likes":
++ const { story_id, likers } = json;
++ return this.update_story_likes(story_id, likers);
++ }
++ }
++
++ update_story_likes(story_id, likers) {
++ const storyLikeElementId = "story_likes_" + story_id;
++
++ let newContents = "";
++ if (likers.length > 0) {
++ newContents = "Liked by: "
++ newContents += likers
++ .map(liker => liker.name)
++ .join(", ")
++ }
++
++ this.replace_element(storyLikeElementId, newContents);
++ }
++
++ replace_element(elementId, newContents) {
++ document.getElementById(elementId).innerHTML = newContents;
++ }
++}
++
++new Likes();
+diff --git a/app/controllers/pages_controller.rb b/app/controllers/pages_controller.rb
+index ce3bf58..0cf8572 100644
+--- a/app/controllers/pages_controller.rb
++++ b/app/controllers/pages_controller.rb
+@@ -1,2 +1,28 @@
+ class PagesController < ApplicationController
++ before_action :authenticate_user!
++
++ MAX_STORIES = 20
++
++ def home
++ home_page_data = HomePageCollator.call(limit: MAX_STORIES)
++ render locals: home_page_data
++ end
++
++ def liked_index
++ liked_page_data = LikedPageCollator.call
++ render locals: liked_page_data
++ end
++
++ def like_story
++ story_id = params.require(:story_id)
++ LikeRepo.new(current_user.id)
++ .toggle_like(story_id)
++
++ likers = LikeRepo.fetch_likes(story_id)
++ render json: {
++ cmd: :update_story_likes,
++ story_id: story_id,
++ likers: likers
++ }
++ end
+ end
+diff --git a/app/models/like.rb b/app/models/like.rb
+new file mode 100644
+index 0000000..8ae5d3b
+--- /dev/null
++++ b/app/models/like.rb
+@@ -0,0 +1,23 @@
++# == Schema Information
++#
++# Table name: likes
++#
++# id :bigint not null, primary key
++# active :boolean default(FALSE)
++# created_at :datetime not null
++# updated_at :datetime not null
++# story_id :integer
++# user_id :bigint
++#
++# Indexes
++#
++# index_likes_on_user_id (user_id)
++# index_likes_on_user_id_and_story_id (user_id,story_id) UNIQUE
++#
++class Like < ApplicationRecord
++ belongs_to :user
++
++ def user_name
++ user.full_name
++ end
++end
+diff --git a/app/models/user.rb b/app/models/user.rb
+index b2091f9..707177d 100644
+--- a/app/models/user.rb
++++ b/app/models/user.rb
+@@ -1,6 +1,35 @@
++# == Schema Information
++#
++# Table name: users
++#
++# id :bigint not null, primary key
++# current_sign_in_at :datetime
++# current_sign_in_ip :inet
++# email :string default(""), not null
++# encrypted_password :string default(""), not null
++# first_name :string
++# last_name :string
++# last_sign_in_at :datetime
++# last_sign_in_ip :inet
++# remember_created_at :datetime
++# reset_password_sent_at :datetime
++# reset_password_token :string
++# sign_in_count :integer default(0), not null
++# created_at :datetime not null
++# updated_at :datetime not null
++#
++# Indexes
++#
++# index_users_on_email (email) UNIQUE
++# index_users_on_reset_password_token (reset_password_token) UNIQUE
++#
+ class User < ApplicationRecord
+ # Include default devise modules. Others available are:
+ # :confirmable, :lockable, :timeoutable and :omniauthable
+ devise :database_authenticatable, :registerable,
+- :recoverable, :rememberable, :trackable, :validatable
++ :recoverable, :rememberable, :trackable, :validatable
++
++ def full_name
++ "#{first_name} #{last_name}"
++ end
+ end
+diff --git a/app/repos/hacker_news_scraper.rb b/app/repos/hacker_news_scraper.rb
+new file mode 100644
+index 0000000..88102ae
+--- /dev/null
++++ b/app/repos/hacker_news_scraper.rb
+@@ -0,0 +1,81 @@
++class HackerNewsScraper
++ API_ROOT = "https://hacker-news.firebaseio.com/v0/"
++
++ def self.retrieve_top_stories(
++ limit: nil,
++ cache_expiry: 3.minutes,
++ relevant_fields: []
++ )
++ Rails.cache.fetch(
++ :hacker_news_top_stories,
++ expires_in: cache_expiry
++ ) do
++ scraper = new
++ story_ids = scraper.fetch_top_story_ids
++ limit ||= story_ids.size
++
++ scraper
++ .fetch_stories(story_ids.first(limit))
++ .map do |story_details|
++ story_details.slice(*relevant_fields)
++ end
++ end
++ end
++
++ def self.retrieve_story_details(
++ story_id:,
++ cache_expiry: 1.day,
++ relevant_fields: []
++ )
++ Rails.cache.fetch(
++ [:story_details, story_id],
++ expires_in: cache_expiry
++ ) do
++ story_details = new.fetch_story_details(story_id)
++ story_details.slice(*relevant_fields)
++ end
++ end
++
++ def fetch_top_story_ids
++ end_point = "topstories.json"
++ get(end_point)
++ end
++
++ def fetch_story_details(story_id)
++ end_point = story_endpoint(story_id)
++ get(end_point)
++ .symbolize_keys
++ end
++
++ def fetch_stories(story_ids)
++ hydra = Typhoeus::Hydra.new
++ requests = build_hydra_requests(story_ids) do |request|
++ hydra.queue(request)
++ end
++ hydra.run
++
++ requests.map do |request|
++ JSON.parse(request.response.body)
++ .symbolize_keys
++ end
++ end
++
++ private
++
++ def get(api_endpoint)
++ request = Typhoeus.get(API_ROOT + api_endpoint)
++ JSON.parse(request.response_body)
++ end
++
++ def story_endpoint(story_id)
++ "item/#{story_id}.json"
++ end
++
++ def build_hydra_requests(story_ids, &block)
++ story_ids.map do |story_id|
++ api_end_point = API_ROOT + story_endpoint(story_id)
++ Typhoeus::Request.new(api_end_point)
++ .tap { |req| block.call(req) }
++ end
++ end
++end
+diff --git a/app/repos/home_page_collator.rb b/app/repos/home_page_collator.rb
+new file mode 100644
+index 0000000..2b39989
+--- /dev/null
++++ b/app/repos/home_page_collator.rb
+@@ -0,0 +1,52 @@
++class HomePageCollator
++ def self.call(limit: nil, cache_expiry: nil)
++ repo = new
++ repo.limit = limit if limit.present?
++ repo.cache_expiry = cache_expiry if cache_expiry.present?
++ repo.execute
++ end
++
++ attr_accessor :limit, :cache_expiry
++
++ def initialize
++ @scraper = HackerNewsScraper
++ @like_repo = LikeRepo
++ @limit = nil
++ @cache_expiry = 3.minutes # top stories will be in constant flux
++ end
++
++ def execute
++ story_data = scrape_news_data
++ story_ids = extract_story_ids(story_data)
++ liker_data = lookup_likes(story_ids)
++
++ {
++ story_data: story_data,
++ liker_data: liker_data
++ }
++ end
++
++ private
++
++ attr_reader :scraper, :like_repo
++
++ def scrape_news_data
++ scraper.retrieve_top_stories(
++ limit: limit,
++ cache_expiry: cache_expiry,
++ relevant_fields: [
++ :id,
++ :title,
++ :url
++ ]
++ )
++ end
++
++ def extract_story_ids(scraped_data)
++ scraped_data.map { |story| story[:id] }
++ end
++
++ def lookup_likes(story_ids)
++ like_repo.fetch_grouped_likes(story_ids)
++ end
++end
+diff --git a/app/repos/like_repo.rb b/app/repos/like_repo.rb
+new file mode 100644
+index 0000000..f5a49b6
+--- /dev/null
++++ b/app/repos/like_repo.rb
+@@ -0,0 +1,62 @@
++class LikeRepo
++ def initialize(user_id)
++ @user_id = user_id
++ end
++
++ def toggle_like(story_id)
++ like = Like.find_or_create_by(
++ story_id: story_id,
++ user_id: user_id
++ )
++ like.toggle!(:active)
++ self
++ end
++
++ def self.fetch_grouped_likes(story_ids = [])
++ likes = story_ids.blank? ?
++ fetch_all_likes :
++ fetch_likes(story_ids)
++
++ likes
++ .group_by { |like| like[:story_id] }
++ .transform_values do |likes|
++ likes
++ .map { |like| like[:name] }
++ .join(", ")
++ end
++ end
++
++ def self.fetch_likes(story_id)
++ likes = Like
++ .includes(:user)
++ .where(
++ story_id: story_id,
++ active: true
++ )
++
++ likes.map { |like| format_like(like) }
++ end
++
++ def self.fetch_all_likes
++ likes = Like
++ .includes(:user)
++ .where(
++ active: true
++ )
++ .order(id: :desc)
++
++ likes.map { |like| format_like(like) }
++ end
++
++ def self.format_like(like)
++ {
++ story_id: like.story_id,
++ user_id: like.user_id,
++ name: like.user_name
++ }
++ end
++
++ private
++
++ attr_reader :user_id
++end
+diff --git a/app/repos/liked_page_collator.rb b/app/repos/liked_page_collator.rb
+new file mode 100644
+index 0000000..9f2f1cf
+--- /dev/null
++++ b/app/repos/liked_page_collator.rb
+@@ -0,0 +1,48 @@
++class LikedPageCollator
++ def self.call(cache_expiry: nil)
++ repo = new
++ repo.cache_expiry = cache_expiry if cache_expiry.present?
++ repo.execute
++ end
++
++ attr_accessor :cache_expiry
++
++ def initialize
++ @scraper = HackerNewsScraper
++ @like_repo = LikeRepo
++ @cache_expiry = 1.day # story details won't change often, if at all
++ end
++
++ def execute
++ liker_data = lookup_all_likes
++ story_ids = liker_data.keys
++ scraped_data = scrape_news_data(story_ids)
++
++ {
++ story_data: scraped_data,
++ liker_data: liker_data
++ }
++ end
++
++ private
++
++ attr_reader :scraper, :like_repo
++
++ def lookup_all_likes
++ like_repo.fetch_grouped_likes
++ end
++
++ def scrape_news_data(story_ids)
++ story_ids.map do |story_id|
++ scraper.retrieve_story_details(
++ story_id: story_id,
++ cache_expiry: cache_expiry,
++ relevant_fields: [
++ :id,
++ :title,
++ :url
++ ]
++ )
++ end
++ end
++end
+diff --git a/app/views/pages/_header.html.erb b/app/views/pages/_header.html.erb
+new file mode 100644
+index 0000000..b1fff55
+--- /dev/null
++++ b/app/views/pages/_header.html.erb
+@@ -0,0 +1,17 @@
++
++
++ <%= link_to "Top Stories", "/" %>
++
++
++ <%= link_to "Liked Stories", "/liked" %>
++
++
++ <%=
++ button_to(
++ "Sign out from #{current_user.email}",
++ destroy_user_session_path,
++ method: :delete
++ )
++ %>
++
++
+diff --git a/app/views/pages/_story_row.html.erb b/app/views/pages/_story_row.html.erb
+new file mode 100644
+index 0000000..eacde18
+--- /dev/null
++++ b/app/views/pages/_story_row.html.erb
+@@ -0,0 +1,16 @@
++<% likers.prepend("Liked by: ") if likers.present? %>
++
++
++ <%=
++ button_to "👍",
++ like_story_path(story_id: story[:id]),
++ {
++ method: :post,
++ remote: true,
++ style: "background-color:#44a; cursor: pointer;",
++ }
++ %>
++
<%= story[:title] %>
++
<%= likers %>
++
++
+diff --git a/app/views/pages/home.html.erb b/app/views/pages/home.html.erb
+index 8bfd829..bb984a0 100644
+--- a/app/views/pages/home.html.erb
++++ b/app/views/pages/home.html.erb
+@@ -1 +1,11 @@
++<%= render "header" %>
+ Welcome to Top News
++
++<% story_data.each do |story_details| %>
++ <%= render(
++ "story_row",
++ story: story_details,
++ likers: liker_data[story_details[:id]]
++ )%>
++<% end %>
++
+diff --git a/app/views/pages/liked_index.html.erb b/app/views/pages/liked_index.html.erb
+new file mode 100644
+index 0000000..9435219
+--- /dev/null
++++ b/app/views/pages/liked_index.html.erb
+@@ -0,0 +1,11 @@
++<%= render "header" %>
++All Liked Stories
++
++<% story_data.each do |story_details| %>
++ <%= render(
++ "story_row",
++ story: story_details,
++ likers: liker_data[story_details[:id]]
++ )%>
++<% end %>
++
+diff --git a/config/application.rb b/config/application.rb
+index dab4cec..f2b7546 100644
+--- a/config/application.rb
++++ b/config/application.rb
+@@ -10,6 +10,7 @@ module Topnews
+ class Application < Rails::Application
+ # Initialize configuration defaults for originally generated Rails version.
+ config.load_defaults 5.1
++ config.active_record.legacy_connection_handling = false
+
+ # Settings in config/environments/* take precedence over those specified here.
+ # Application configuration should go into files in config/initializers
+diff --git a/config/environments/test.rb b/config/environments/test.rb
+index 8e5cbde..7b6dc4c 100644
+--- a/config/environments/test.rb
++++ b/config/environments/test.rb
+@@ -5,7 +5,7 @@ Rails.application.configure do
+ # test suite. You never need to work with it otherwise. Remember that
+ # your test database is "scratch space" for the test suite and is wiped
+ # and recreated between test runs. Don't rely on the data there!
+- config.cache_classes = true
++ config.cache_classes = false
+
+ # Do not eager load code on boot. This avoids loading your whole application
+ # just for the purpose of running a single test. If you are using a tool that
+diff --git a/config/initializers/devise.rb b/config/initializers/devise.rb
+index 962d4a7..235093e 100644
+--- a/config/initializers/devise.rb
++++ b/config/initializers/devise.rb
+@@ -244,7 +244,7 @@ Devise.setup do |config|
+ # should add them to the navigational formats lists.
+ #
+ # The "*/*" below is required to match Internet Explorer requests.
+- # config.navigational_formats = ['*/*', :html]
++ config.navigational_formats = ['*/*', :html, :turbo_stream]
+
+ # The default HTTP method used to sign out a resource. Default is :delete.
+ config.sign_out_via = :delete
+diff --git a/config/routes.rb b/config/routes.rb
+index c12ef08..5163a68 100644
+--- a/config/routes.rb
++++ b/config/routes.rb
+@@ -1,4 +1,7 @@
+ Rails.application.routes.draw do
+ devise_for :users
+- root to: 'pages#home'
++ root to: "pages#home"
++
++ get "liked", to: "pages#liked_index", as: :liked_index
++ post "like/:story_id", to: "pages#like_story", as: :like_story
+ end
+diff --git a/db/migrate/20240801001753_create_like_table.rb b/db/migrate/20240801001753_create_like_table.rb
+new file mode 100644
+index 0000000..3f99c6f
+--- /dev/null
++++ b/db/migrate/20240801001753_create_like_table.rb
+@@ -0,0 +1,11 @@
++class CreateLikeTable < ActiveRecord::Migration[7.0]
++ def change
++ create_table :likes do |t|
++ t.references :user
++ t.integer :story_id
++ t.index [:user_id, :story_id], unique: true
++ t.boolean :active, default: false
++ t.timestamps
++ end
++ end
++end
+diff --git a/db/schema.rb b/db/schema.rb
+index acc34f3..3ec6005 100644
+--- a/db/schema.rb
++++ b/db/schema.rb
+@@ -10,10 +10,20 @@
+ #
+ # It's strongly recommended that you check this file into your version control system.
+
+-ActiveRecord::Schema[7.0].define(version: 2018_02_28_212101) do
++ActiveRecord::Schema[7.0].define(version: 2024_08_01_001753) do
+ # These are extensions that must be enabled in order to support this database
+ enable_extension "plpgsql"
+
++ create_table "likes", force: :cascade do |t|
++ t.bigint "user_id"
++ t.integer "story_id"
++ t.boolean "active", default: false
++ t.datetime "created_at", null: false
++ t.datetime "updated_at", null: false
++ t.index ["user_id", "story_id"], name: "index_likes_on_user_id_and_story_id", unique: true
++ t.index ["user_id"], name: "index_likes_on_user_id"
++ end
++
+ create_table "users", force: :cascade do |t|
+ t.string "first_name"
+ t.string "last_name"
+diff --git a/fixtures/vcr_cassettes/story_details_41119080.yml b/fixtures/vcr_cassettes/story_details_41119080.yml
+new file mode 100644
+index 0000000..1168103
+--- /dev/null
++++ b/fixtures/vcr_cassettes/story_details_41119080.yml
+@@ -0,0 +1,43 @@
++---
++http_interactions:
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 19:13:44 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '334'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: ASCII-8BIT
++ string: '{"by":"Brajeshwar","descendants":52,"id":41119080,"kids":[41120013,41122192,41121903,41119817,41120734,41121026,41119579,41119896,41119789,41120995,41119829],"score":125,"time":1722433249,"title":"How
++ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
++ recorded_at: Wed, 31 Jul 2024 19:13:44 GMT
++recorded_with: VCR 6.2.0
+diff --git a/fixtures/vcr_cassettes/story_details_41119443.yml b/fixtures/vcr_cassettes/story_details_41119443.yml
+new file mode 100644
+index 0000000..4a415fa
+--- /dev/null
++++ b/fixtures/vcr_cassettes/story_details_41119443.yml
+@@ -0,0 +1,46 @@
++---
++http_interactions:
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 21:10:50 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '5639'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"darweenist","descendants":59,"id":41119443,"kids":[41122773,41123293,41122675],"score":77,"text":"Hey
++ HN! Dawson here from Martin (https://www.trymartin.com).
++ Martin is a better Siri with an LLM brain and deeper integrations with everyday
++ apps.","time":1722435867,"title":"Launch HN: Martin
++ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
++ recorded_at: Wed, 31 Jul 2024 21:10:50 GMT
++recorded_with: VCR 6.2.0
+diff --git a/fixtures/vcr_cassettes/story_details_multi.yml b/fixtures/vcr_cassettes/story_details_multi.yml
+new file mode 100644
+index 0000000..6005d5a
+--- /dev/null
++++ b/fixtures/vcr_cassettes/story_details_multi.yml
+@@ -0,0 +1,126 @@
++---
++http_interactions:
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 20:48:03 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '343'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"Brajeshwar","descendants":63,"id":41119080,"kids":[41122732,41119817,41121903],"score":143,"time":1722433249,"title":"How
++ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
++ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41120254.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 20:48:03 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '910'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"BerislavLopac","descendants":236,"id":41120254,"kids":[41123233,41121444,41123268],"score":212,"time":1722440995,"title":"I
++ prefer rST to Markdown","type":"story","url":"https://buttondown.email/hillelwayne/archive/why-i-prefer-rst-to-markdown/"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41120254.json
++ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 20:48:03 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '5612'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"darweenist","descendants":53,"id":41119443,"kids":[41122773,41122675,41121790],"score":73,"text":"Hey
++ HN! Dawson here from Martin (https://www.trymartin.com).
++ Martin is a better Siri with an LLM brain and deeper integrations with everyday
++ apps.","time":1722435867,"title":"Launch HN: Martin
++ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
++ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
++recorded_with: VCR 6.2.0
+diff --git a/fixtures/vcr_cassettes/top_stories.yml b/fixtures/vcr_cassettes/top_stories.yml
+new file mode 100644
+index 0000000..a433377
+--- /dev/null
++++ b/fixtures/vcr_cassettes/top_stories.yml
+@@ -0,0 +1,42 @@
++---
++http_interactions:
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/topstories.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 18:58:34 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '4501'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: ASCII-8BIT
++ string: "[41119080,41120254,41119443]"
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/topstories.json
++ recorded_at: Wed, 31 Jul 2024 18:58:34 GMT
++recorded_with: VCR 6.2.0
+diff --git a/fixtures/vcr_cassettes/top_story_full_refresh.yml b/fixtures/vcr_cassettes/top_story_full_refresh.yml
+new file mode 100644
+index 0000000..3a5466d
+--- /dev/null
++++ b/fixtures/vcr_cassettes/top_story_full_refresh.yml
+@@ -0,0 +1,164 @@
++---
++http_interactions:
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/topstories.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 22:02:12 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '4501'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: "[41123155,41119080,41119443]"
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/topstories.json
++ recorded_at: Wed, 31 Jul 2024 22:02:12 GMT
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41123155.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 22:02:13 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '483'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"kgwgk","descendants":82,"id":41123155,"kids":[41123621,41123650,41123924],"score":227,"time":1722458071,"title":"Suspicious
++ data pattern in recent Venezuelan election","type":"story","url":"https://statmodeling.stat.columbia.edu/2024/07/31/suspicious-data-pattern-in-recent-venezuelan-election/"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41123155.json
++ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 22:02:13 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '352'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"Brajeshwar","descendants":63,"id":41119080,"kids":[41122732,41119817,41121903],"score":156,"time":1722433249,"title":"How
++ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
++ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
++- request:
++ method: get
++ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
++ body:
++ encoding: US-ASCII
++ string: ''
++ headers:
++ User-Agent:
++ - Typhoeus - https://github.com/typhoeus/typhoeus
++ Expect:
++ - ''
++ response:
++ status:
++ code: 200
++ message: OK
++ headers:
++ Server:
++ - nginx
++ Date:
++ - Wed, 31 Jul 2024 22:02:13 GMT
++ Content-Type:
++ - application/json; charset=utf-8
++ Content-Length:
++ - '5657'
++ Connection:
++ - keep-alive
++ Access-Control-Allow-Origin:
++ - "*"
++ Cache-Control:
++ - no-cache
++ Strict-Transport-Security:
++ - max-age=31556926; includeSubDomains; preload
++ body:
++ encoding: UTF-8
++ string: '{"by":"darweenist","descendants":64,"id":41119443,"kids":[41122773,41121790,41123293],"score":86,"text":"Hey
++ HN! Dawson here from Martin (https://www.trymartin.com).
++ Martin is a better Siri with an LLM brain and deeper integrations with everyday
++ apps.","time":1722435867,"title":"Launch HN: Martin
++ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
++ http_version: '1.1'
++ adapter_metadata:
++ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
++ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
+diff --git a/lib/tasks/auto_annotate_models.rake b/lib/tasks/auto_annotate_models.rake
+new file mode 100644
+index 0000000..e96283e
+--- /dev/null
++++ b/lib/tasks/auto_annotate_models.rake
+@@ -0,0 +1,59 @@
++# NOTE: only doing this in development as some production environments (Heroku)
++# NOTE: are sensitive to local FS writes, and besides -- it's just not proper
++# NOTE: to have a dev-mode tool do its thing in production.
++if Rails.env.development?
++ require 'annotate'
++ task :set_annotation_options do
++ # You can override any of these by setting an environment variable of the
++ # same name.
++ Annotate.set_defaults(
++ 'active_admin' => 'false',
++ 'additional_file_patterns' => [],
++ 'routes' => 'false',
++ 'models' => 'true',
++ 'position_in_routes' => 'before',
++ 'position_in_class' => 'before',
++ 'position_in_test' => 'before',
++ 'position_in_fixture' => 'before',
++ 'position_in_factory' => 'before',
++ 'position_in_serializer' => 'before',
++ 'show_foreign_keys' => 'true',
++ 'show_complete_foreign_keys' => 'false',
++ 'show_indexes' => 'true',
++ 'simple_indexes' => 'false',
++ 'model_dir' => 'app/models',
++ 'root_dir' => '',
++ 'include_version' => 'false',
++ 'require' => '',
++ 'exclude_tests' => 'false',
++ 'exclude_fixtures' => 'false',
++ 'exclude_factories' => 'false',
++ 'exclude_serializers' => 'false',
++ 'exclude_scaffolds' => 'true',
++ 'exclude_controllers' => 'true',
++ 'exclude_helpers' => 'true',
++ 'exclude_sti_subclasses' => 'false',
++ 'ignore_model_sub_dir' => 'false',
++ 'ignore_columns' => nil,
++ 'ignore_routes' => nil,
++ 'ignore_unknown_models' => 'false',
++ 'hide_limit_column_types' => 'integer,bigint,boolean',
++ 'hide_default_column_types' => 'json,jsonb,hstore',
++ 'skip_on_db_migrate' => 'false',
++ 'format_bare' => 'true',
++ 'format_rdoc' => 'false',
++ 'format_yard' => 'false',
++ 'format_markdown' => 'false',
++ 'sort' => 'false',
++ 'force' => 'false',
++ 'frozen' => 'false',
++ 'classified_sort' => 'true',
++ 'trace' => 'false',
++ 'wrapper_open' => nil,
++ 'wrapper_close' => nil,
++ 'with_comment' => 'true'
++ )
++ end
++
++ Annotate.load_tasks
++end
+diff --git a/spec/models/like_spec.rb b/spec/models/like_spec.rb
+new file mode 100644
+index 0000000..bf8f25e
+--- /dev/null
++++ b/spec/models/like_spec.rb
+@@ -0,0 +1,31 @@
++# == Schema Information
++#
++# Table name: likes
++#
++# id :bigint not null, primary key
++# active :boolean default(FALSE)
++# created_at :datetime not null
++# updated_at :datetime not null
++# story_id :integer
++# user_id :bigint
++#
++# Indexes
++#
++# index_likes_on_user_id (user_id)
++# index_likes_on_user_id_and_story_id (user_id,story_id) UNIQUE
++#
++require "rails_helper"
++
++describe Like do
++ describe "#user_name" do
++ let(:user) { User.new(first_name: "foo", last_name: "bar") }
++
++ it "should be constructed from the liker's first/last name" do
++ like = Like.new({
++ user: user,
++ story_id: 9999
++ })
++ expect(like.user_name).to eq "foo bar"
++ end
++ end
++end
+diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
+index b51dc1c..7505cab 100644
+--- a/spec/models/user_spec.rb
++++ b/spec/models/user_spec.rb
+@@ -1,17 +1,49 @@
+-require 'rails_helper'
++# == Schema Information
++#
++# Table name: users
++#
++# id :bigint not null, primary key
++# current_sign_in_at :datetime
++# current_sign_in_ip :inet
++# email :string default(""), not null
++# encrypted_password :string default(""), not null
++# first_name :string
++# last_name :string
++# last_sign_in_at :datetime
++# last_sign_in_ip :inet
++# remember_created_at :datetime
++# reset_password_sent_at :datetime
++# reset_password_token :string
++# sign_in_count :integer default(0), not null
++# created_at :datetime not null
++# updated_at :datetime not null
++#
++# Indexes
++#
++# index_users_on_email (email) UNIQUE
++# index_users_on_reset_password_token (reset_password_token) UNIQUE
++#
++require "rails_helper"
+
+ describe User do
+- context "creating a new user" do
+- let(:attrs) do
+- { first_name: :foo, last_name: :bar, email: 'f@b.c', password: 'foobar123' }
+- end
++ let(:attrs) do
++ {first_name: :foo, last_name: :bar, email: "f@b.c", password: "foobar123"}
++ end
+
++ context "creating a new user" do
+ it "should have first, last, email" do
+- expect { User.create(attrs) }.to change{ User.count }.by(1)
++ expect { User.create(attrs) }.to change { User.count }.by(1)
+ end
+
+ it "should require a password" do
+ expect(User.new(attrs.except(:password))).to be_invalid
+ end
+ end
++
++ describe "#full_name" do
++ it "should be constructed from first/last name" do
++ full_name = User.new(attrs).full_name
++ expect(full_name).to eq "foo bar"
++ end
++ end
+ end
+diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
+index bbe1ba5..4758f39 100644
+--- a/spec/rails_helper.rb
++++ b/spec/rails_helper.rb
+@@ -1,10 +1,10 @@
+ # This file is copied to spec/ when you run 'rails generate rspec:install'
+-require 'spec_helper'
+-ENV['RAILS_ENV'] ||= 'test'
+-require File.expand_path('../../config/environment', __FILE__)
++require "spec_helper"
++ENV["RAILS_ENV"] ||= "test"
++require File.expand_path("../../config/environment", __FILE__)
+ # Prevent database truncation if the environment is production
+ abort("The Rails environment is running in production mode!") if Rails.env.production?
+-require 'rspec/rails'
++require "rspec/rails"
+ # Add additional requires below this line. Rails is not loaded until this point!
+
+ # Requires supporting ruby files with custom matchers and macros, etc, in
+@@ -27,9 +27,6 @@ require 'rspec/rails'
+ ActiveRecord::Migration.maintain_test_schema!
+
+ RSpec.configure do |config|
+- # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
+- config.fixture_path = "#{::Rails.root}/spec/fixtures"
+-
+ # If you're not using ActiveRecord, or you'd prefer not to run each of your
+ # examples within a transaction, remove the following line or assign false
+ # instead of true.
+@@ -55,3 +52,18 @@ RSpec.configure do |config|
+ # arbitrary gems may also be filtered via:
+ # config.filter_gems_from_backtrace("gem name")
+ end
++
++VCR.configure do |config|
++ config.default_cassette_options = {
++ record: :new_episodes,
++ erb: true,
++ decode_compressed_response: true
++ }
++ # will decompress binary responses before writing to cassettes
++ config.before_record do |req|
++ req.response.body.force_encoding("UTF-8")
++ end
++
++ config.cassette_library_dir = "fixtures/vcr_cassettes"
++ config.hook_into :typhoeus
++end
+diff --git a/spec/repos/hacker_news_scraper_spec.rb b/spec/repos/hacker_news_scraper_spec.rb
+new file mode 100644
+index 0000000..cd9c8c9
+--- /dev/null
++++ b/spec/repos/hacker_news_scraper_spec.rb
+@@ -0,0 +1,154 @@
++require "rails_helper"
++
++describe HackerNewsScraper do
++ let(:scraper) { described_class.new }
++
++ describe "#fetch_top_story_ids" do
++ it "should retrieve a json list of story ids" do
++ story_ids = VCR.use_cassette("top_stories") do
++ scraper.fetch_top_story_ids
++ end
++
++ # this cassette was manually trimmed for length
++ expect(story_ids).to match_array [
++ 41119080,
++ 41120254,
++ 41119443
++ ]
++ end
++ end
++
++ describe "#fetch_story_details" do
++ it "should retrieve a specific story's meta data" do
++ story_details = VCR.use_cassette("story_details_41119080") do
++ scraper.fetch_story_details(41119080)
++ end
++
++ expect(story_details).to include(
++ by: "Brajeshwar",
++ descendants: 52,
++ id: 41119080,
++ kids: [41120013, 41122192, 41121903, 41119817, 41120734, 41121026, 41119579, 41119896, 41119789, 41120995, 41119829],
++ score: 125,
++ time: 1722433249,
++ title: "How great was the Great Oxidation Event?",
++ type: "story",
++ url: "https://eos.org/science-updates/how-great-was-the-great-oxidation-event"
++ )
++ end
++
++ it "should be able to interpret a binary response body" do
++ story_details = VCR.use_cassette("story_details_41119443") do
++ scraper.fetch_story_details(41119443)
++ end
++
++ # In order to trim this cassette, I had to set a VCR config option:
++ # req.response.body.force_encoding("UTF-8").
++ # But this endpoint definitely returns a binary string response
++ expect(story_details).to include(
++ by: "darweenist",
++ descendants: 59,
++ id: 41119443,
++ kids: [41122773, 41123293, 41122675],
++ score: 77,
++ text: "Hey HN! Dawson here from Martin (https://www.trymartin.com). Martin is a better Siri with an LLM brain and deeper integrations with everyday apps.",
++ time: 1722435867,
++ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri",
++ type: "story"
++ )
++ end
++ end
++
++ describe "#fetch_stories" do
++ it "should retrieve multiple stories' meta data" do
++ multi_story_details = VCR.use_cassette("story_details_multi") do
++ scraper.fetch_stories([
++ 41119080,
++ 41120254,
++ 41119443
++ ])
++ end
++
++ # this cassette was manually trimmed for length
++ expect(multi_story_details).to match_array [
++ {
++ by: "Brajeshwar",
++ descendants: 63,
++ id: 41119080,
++ kids: [41122732, 41119817, 41121903],
++ score: 143,
++ time: 1722433249,
++ title: "How great was the Great Oxidation Event?",
++ type: "story",
++ url: "https://eos.org/science-updates/how-great-was-the-great-oxidation-event"
++ },
++ {
++ by: "BerislavLopac",
++ descendants: 236,
++ id: 41120254,
++ kids: [41123233, 41121444, 41123268],
++ score: 212,
++ time: 1722440995,
++ title: "I prefer rST to Markdown",
++ type: "story",
++ url: "https://buttondown.email/hillelwayne/archive/why-i-prefer-rst-to-markdown/"
++ },
++ {
++ by: "darweenist",
++ descendants: 53,
++ id: 41119443,
++ kids: [41122773, 41122675, 41121790],
++ score: 73,
++ text: "Hey HN! Dawson here from Martin (https://www.trymartin.com). Martin is a better Siri with an LLM brain and deeper integrations with everyday apps.",
++ time: 1722435867,
++ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri",
++ type: "story"
++ }
++ ]
++ end
++ end
++
++ describe ".retrieve_top_stories" do
++ it "should do an end to end refresh of top story data" do
++ all_top_stories = VCR.use_cassette("top_story_full_refresh") do
++ described_class.retrieve_top_stories(
++ cache_expiry: 0.seconds,
++ relevant_fields: [:title]
++ )
++ end
++
++ # this cassette was HEAVILY trimmed for length
++ expect(all_top_stories).to match_array [
++ {
++ title: "Suspicious data pattern in recent Venezuelan election"
++ },
++ {
++ title: "How great was the Great Oxidation Event?"
++ },
++ {
++ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri"
++ }
++ ]
++ end
++
++ it "can optionally return the top x results" do
++ some_top_stories = VCR.use_cassette("top_story_full_refresh") do
++ described_class.retrieve_top_stories(
++ limit: 2,
++ cache_expiry: 0.seconds,
++ relevant_fields: [:title]
++ )
++ end
++
++ # this cassette was HEAVILY trimmed for length
++ expect(some_top_stories).to match_array [
++ {
++ title: "Suspicious data pattern in recent Venezuelan election"
++ },
++ {
++ title: "How great was the Great Oxidation Event?"
++ }
++ ]
++ end
++ end
++end
+diff --git a/spec/repos/like_repo_spec.rb b/spec/repos/like_repo_spec.rb
+new file mode 100644
+index 0000000..018542c
+--- /dev/null
++++ b/spec/repos/like_repo_spec.rb
+@@ -0,0 +1,120 @@
++require "rails_helper"
++
++describe LikeRepo do
++ let(:user_attrs) do
++ {first_name: :foo, last_name: :bar, email: "f@b.c", password: "foobar123"}
++ end
++
++ let(:user_a) {
++ User.create(
++ **user_attrs, email: "a@example.com", first_name: "user", last_name: "ayy"
++ )
++ }
++ let(:user_b) {
++ User.create(
++ **user_attrs, email: "b@example.com", first_name: "user", last_name: "bee"
++ )
++ }
++ let(:user_c) {
++ User.create(
++ **user_attrs, email: "c@example.com", first_name: "user", last_name: "see"
++ )
++ }
++
++ let(:repo) { described_class.new(user_a.id) }
++
++ describe "#toggle_like" do
++ let(:story_id) { 999 }
++
++ it "should flip from liked to unliked and back on subsequent calls" do
++ expect(described_class.fetch_likes(story_id)).to match_array []
++
++ repo.toggle_like(story_id)
++ expect(described_class.fetch_likes(story_id)).to match_array [
++ {
++ user_id: user_a.id,
++ name: "user ayy",
++ story_id: story_id
++ }
++ ]
++
++ repo.toggle_like(story_id)
++ expect(described_class.fetch_likes(story_id)).to match_array []
++
++ repo.toggle_like(story_id)
++ expect(described_class.fetch_likes(story_id)).to match_array [
++ {
++ user_id: user_a.id,
++ name: "user ayy",
++ story_id: story_id
++ }
++ ]
++ end
++
++ context "with multiple likers" do
++ it "should show multiple names" do
++ LikeRepo.new(user_a.id).toggle_like(story_id)
++ LikeRepo.new(user_b.id).toggle_like(story_id)
++
++ expect(LikeRepo.fetch_likes(story_id)).to match_array [
++ {
++ user_id: user_a.id,
++ name: "user ayy",
++ story_id: story_id
++ },
++ {
++ user_id: user_b.id,
++ name: "user bee",
++ story_id: story_id
++ }
++ ]
++ end
++ end
++ end
++
++ describe ".fetch_grouped_likes" do
++ it "should return a hash keyed by story_id" do
++ LikeRepo.new(user_a.id)
++ .toggle_like(100)
++ .toggle_like(200)
++
++ LikeRepo.new(user_b.id)
++ .toggle_like(200)
++ .toggle_like(300)
++
++ # shouldn't be found
++ LikeRepo.new(user_c.id)
++ .toggle_like(500)
++
++ expect(LikeRepo.fetch_grouped_likes(
++ [100, 200, 300, 400]
++ )).to include(
++ 100 => "user ayy",
++ 200 => "user ayy, user bee",
++ 300 => "user bee"
++ )
++ end
++
++ it "will return all likes if not given any story_ids" do
++ LikeRepo.new(user_a.id)
++ .toggle_like(100)
++ .toggle_like(200)
++
++ LikeRepo.new(user_b.id)
++ .toggle_like(200)
++ .toggle_like(300)
++
++ # should be found
++ LikeRepo.new(user_c.id)
++ .toggle_like(500)
++
++ expect(LikeRepo.fetch_grouped_likes)
++ .to include(
++ 100 => "user ayy",
++ 200 => "user bee, user ayy",
++ 300 => "user bee",
++ 500 => "user see"
++ )
++ end
++ end
++end
diff --git a/fixtures/vcr_cassettes/story_details_41119080.yml b/fixtures/vcr_cassettes/story_details_41119080.yml
new file mode 100644
index 00000000..11681039
--- /dev/null
+++ b/fixtures/vcr_cassettes/story_details_41119080.yml
@@ -0,0 +1,43 @@
+---
+http_interactions:
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 19:13:44 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '334'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: ASCII-8BIT
+ string: '{"by":"Brajeshwar","descendants":52,"id":41119080,"kids":[41120013,41122192,41121903,41119817,41120734,41121026,41119579,41119896,41119789,41120995,41119829],"score":125,"time":1722433249,"title":"How
+ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
+ recorded_at: Wed, 31 Jul 2024 19:13:44 GMT
+recorded_with: VCR 6.2.0
diff --git a/fixtures/vcr_cassettes/story_details_41119443.yml b/fixtures/vcr_cassettes/story_details_41119443.yml
new file mode 100644
index 00000000..4a415fac
--- /dev/null
+++ b/fixtures/vcr_cassettes/story_details_41119443.yml
@@ -0,0 +1,46 @@
+---
+http_interactions:
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 21:10:50 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '5639'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"darweenist","descendants":59,"id":41119443,"kids":[41122773,41123293,41122675],"score":77,"text":"Hey
+ HN! Dawson here from Martin (https://www.trymartin.com).
+ Martin is a better Siri with an LLM brain and deeper integrations with everyday
+ apps.","time":1722435867,"title":"Launch HN: Martin
+ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
+ recorded_at: Wed, 31 Jul 2024 21:10:50 GMT
+recorded_with: VCR 6.2.0
diff --git a/fixtures/vcr_cassettes/story_details_multi.yml b/fixtures/vcr_cassettes/story_details_multi.yml
new file mode 100644
index 00000000..6005d5a5
--- /dev/null
+++ b/fixtures/vcr_cassettes/story_details_multi.yml
@@ -0,0 +1,126 @@
+---
+http_interactions:
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 20:48:03 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '343'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"Brajeshwar","descendants":63,"id":41119080,"kids":[41122732,41119817,41121903],"score":143,"time":1722433249,"title":"How
+ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
+ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41120254.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 20:48:03 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '910'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"BerislavLopac","descendants":236,"id":41120254,"kids":[41123233,41121444,41123268],"score":212,"time":1722440995,"title":"I
+ prefer rST to Markdown","type":"story","url":"https://buttondown.email/hillelwayne/archive/why-i-prefer-rst-to-markdown/"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41120254.json
+ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 20:48:03 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '5612'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"darweenist","descendants":53,"id":41119443,"kids":[41122773,41122675,41121790],"score":73,"text":"Hey
+ HN! Dawson here from Martin (https://www.trymartin.com).
+ Martin is a better Siri with an LLM brain and deeper integrations with everyday
+ apps.","time":1722435867,"title":"Launch HN: Martin
+ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
+ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
+recorded_with: VCR 6.2.0
diff --git a/fixtures/vcr_cassettes/top_stories.yml b/fixtures/vcr_cassettes/top_stories.yml
new file mode 100644
index 00000000..a4333777
--- /dev/null
+++ b/fixtures/vcr_cassettes/top_stories.yml
@@ -0,0 +1,42 @@
+---
+http_interactions:
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/topstories.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 18:58:34 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '4501'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: ASCII-8BIT
+ string: "[41119080,41120254,41119443]"
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/topstories.json
+ recorded_at: Wed, 31 Jul 2024 18:58:34 GMT
+recorded_with: VCR 6.2.0
diff --git a/fixtures/vcr_cassettes/top_story_full_refresh.yml b/fixtures/vcr_cassettes/top_story_full_refresh.yml
new file mode 100644
index 00000000..3a5466d2
--- /dev/null
+++ b/fixtures/vcr_cassettes/top_story_full_refresh.yml
@@ -0,0 +1,164 @@
+---
+http_interactions:
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/topstories.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 22:02:12 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '4501'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: "[41123155,41119080,41119443]"
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/topstories.json
+ recorded_at: Wed, 31 Jul 2024 22:02:12 GMT
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41123155.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 22:02:13 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '483'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"kgwgk","descendants":82,"id":41123155,"kids":[41123621,41123650,41123924],"score":227,"time":1722458071,"title":"Suspicious
+ data pattern in recent Venezuelan election","type":"story","url":"https://statmodeling.stat.columbia.edu/2024/07/31/suspicious-data-pattern-in-recent-venezuelan-election/"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41123155.json
+ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 22:02:13 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '352'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"Brajeshwar","descendants":63,"id":41119080,"kids":[41122732,41119817,41121903],"score":156,"time":1722433249,"title":"How
+ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
+ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
+- request:
+ method: get
+ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
+ body:
+ encoding: US-ASCII
+ string: ''
+ headers:
+ User-Agent:
+ - Typhoeus - https://github.com/typhoeus/typhoeus
+ Expect:
+ - ''
+ response:
+ status:
+ code: 200
+ message: OK
+ headers:
+ Server:
+ - nginx
+ Date:
+ - Wed, 31 Jul 2024 22:02:13 GMT
+ Content-Type:
+ - application/json; charset=utf-8
+ Content-Length:
+ - '5657'
+ Connection:
+ - keep-alive
+ Access-Control-Allow-Origin:
+ - "*"
+ Cache-Control:
+ - no-cache
+ Strict-Transport-Security:
+ - max-age=31556926; includeSubDomains; preload
+ body:
+ encoding: UTF-8
+ string: '{"by":"darweenist","descendants":64,"id":41119443,"kids":[41122773,41121790,41123293],"score":86,"text":"Hey
+ HN! Dawson here from Martin (https://www.trymartin.com).
+ Martin is a better Siri with an LLM brain and deeper integrations with everyday
+ apps.","time":1722435867,"title":"Launch HN: Martin
+ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
+ http_version: '1.1'
+ adapter_metadata:
+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
+ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
diff --git a/lib/tasks/auto_annotate_models.rake b/lib/tasks/auto_annotate_models.rake
new file mode 100644
index 00000000..e96283ea
--- /dev/null
+++ b/lib/tasks/auto_annotate_models.rake
@@ -0,0 +1,59 @@
+# NOTE: only doing this in development as some production environments (Heroku)
+# NOTE: are sensitive to local FS writes, and besides -- it's just not proper
+# NOTE: to have a dev-mode tool do its thing in production.
+if Rails.env.development?
+ require 'annotate'
+ task :set_annotation_options do
+ # You can override any of these by setting an environment variable of the
+ # same name.
+ Annotate.set_defaults(
+ 'active_admin' => 'false',
+ 'additional_file_patterns' => [],
+ 'routes' => 'false',
+ 'models' => 'true',
+ 'position_in_routes' => 'before',
+ 'position_in_class' => 'before',
+ 'position_in_test' => 'before',
+ 'position_in_fixture' => 'before',
+ 'position_in_factory' => 'before',
+ 'position_in_serializer' => 'before',
+ 'show_foreign_keys' => 'true',
+ 'show_complete_foreign_keys' => 'false',
+ 'show_indexes' => 'true',
+ 'simple_indexes' => 'false',
+ 'model_dir' => 'app/models',
+ 'root_dir' => '',
+ 'include_version' => 'false',
+ 'require' => '',
+ 'exclude_tests' => 'false',
+ 'exclude_fixtures' => 'false',
+ 'exclude_factories' => 'false',
+ 'exclude_serializers' => 'false',
+ 'exclude_scaffolds' => 'true',
+ 'exclude_controllers' => 'true',
+ 'exclude_helpers' => 'true',
+ 'exclude_sti_subclasses' => 'false',
+ 'ignore_model_sub_dir' => 'false',
+ 'ignore_columns' => nil,
+ 'ignore_routes' => nil,
+ 'ignore_unknown_models' => 'false',
+ 'hide_limit_column_types' => 'integer,bigint,boolean',
+ 'hide_default_column_types' => 'json,jsonb,hstore',
+ 'skip_on_db_migrate' => 'false',
+ 'format_bare' => 'true',
+ 'format_rdoc' => 'false',
+ 'format_yard' => 'false',
+ 'format_markdown' => 'false',
+ 'sort' => 'false',
+ 'force' => 'false',
+ 'frozen' => 'false',
+ 'classified_sort' => 'true',
+ 'trace' => 'false',
+ 'wrapper_open' => nil,
+ 'wrapper_close' => nil,
+ 'with_comment' => 'true'
+ )
+ end
+
+ Annotate.load_tasks
+end
diff --git a/spec/models/like_spec.rb b/spec/models/like_spec.rb
new file mode 100644
index 00000000..bf8f25ee
--- /dev/null
+++ b/spec/models/like_spec.rb
@@ -0,0 +1,31 @@
+# == Schema Information
+#
+# Table name: likes
+#
+# id :bigint not null, primary key
+# active :boolean default(FALSE)
+# created_at :datetime not null
+# updated_at :datetime not null
+# story_id :integer
+# user_id :bigint
+#
+# Indexes
+#
+# index_likes_on_user_id (user_id)
+# index_likes_on_user_id_and_story_id (user_id,story_id) UNIQUE
+#
+require "rails_helper"
+
+describe Like do
+ describe "#user_name" do
+ let(:user) { User.new(first_name: "foo", last_name: "bar") }
+
+ it "should be constructed from the liker's first/last name" do
+ like = Like.new({
+ user: user,
+ story_id: 9999
+ })
+ expect(like.user_name).to eq "foo bar"
+ end
+ end
+end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index b51dc1c3..7505cabb 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -1,17 +1,49 @@
-require 'rails_helper'
+# == Schema Information
+#
+# Table name: users
+#
+# id :bigint not null, primary key
+# current_sign_in_at :datetime
+# current_sign_in_ip :inet
+# email :string default(""), not null
+# encrypted_password :string default(""), not null
+# first_name :string
+# last_name :string
+# last_sign_in_at :datetime
+# last_sign_in_ip :inet
+# remember_created_at :datetime
+# reset_password_sent_at :datetime
+# reset_password_token :string
+# sign_in_count :integer default(0), not null
+# created_at :datetime not null
+# updated_at :datetime not null
+#
+# Indexes
+#
+# index_users_on_email (email) UNIQUE
+# index_users_on_reset_password_token (reset_password_token) UNIQUE
+#
+require "rails_helper"
describe User do
- context "creating a new user" do
- let(:attrs) do
- { first_name: :foo, last_name: :bar, email: 'f@b.c', password: 'foobar123' }
- end
+ let(:attrs) do
+ {first_name: :foo, last_name: :bar, email: "f@b.c", password: "foobar123"}
+ end
+ context "creating a new user" do
it "should have first, last, email" do
- expect { User.create(attrs) }.to change{ User.count }.by(1)
+ expect { User.create(attrs) }.to change { User.count }.by(1)
end
it "should require a password" do
expect(User.new(attrs.except(:password))).to be_invalid
end
end
+
+ describe "#full_name" do
+ it "should be constructed from first/last name" do
+ full_name = User.new(attrs).full_name
+ expect(full_name).to eq "foo bar"
+ end
+ end
end
diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
index bbe1ba57..4758f395 100644
--- a/spec/rails_helper.rb
+++ b/spec/rails_helper.rb
@@ -1,10 +1,10 @@
# This file is copied to spec/ when you run 'rails generate rspec:install'
-require 'spec_helper'
-ENV['RAILS_ENV'] ||= 'test'
-require File.expand_path('../../config/environment', __FILE__)
+require "spec_helper"
+ENV["RAILS_ENV"] ||= "test"
+require File.expand_path("../../config/environment", __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
-require 'rspec/rails'
+require "rspec/rails"
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
@@ -27,9 +27,6 @@
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
- # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
- config.fixture_path = "#{::Rails.root}/spec/fixtures"
-
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
@@ -55,3 +52,18 @@
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
end
+
+VCR.configure do |config|
+ config.default_cassette_options = {
+ record: :new_episodes,
+ erb: true,
+ decode_compressed_response: true
+ }
+ # normalize response body encoding to UTF-8 before recording so cassettes
+ # stay human-readable (decompression is handled by decode_compressed_response)
+ config.before_record do |req|
+ req.response.body.force_encoding("UTF-8")
+ end
+
+ config.cassette_library_dir = "fixtures/vcr_cassettes"
+ config.hook_into :typhoeus
+end
diff --git a/spec/repos/hacker_news_scraper_spec.rb b/spec/repos/hacker_news_scraper_spec.rb
new file mode 100644
index 00000000..cd9c8c9b
--- /dev/null
+++ b/spec/repos/hacker_news_scraper_spec.rb
@@ -0,0 +1,154 @@
+require "rails_helper"
+
+describe HackerNewsScraper do
+ let(:scraper) { described_class.new }
+
+ describe "#fetch_top_story_ids" do
+ it "should retrieve a json list of story ids" do
+ story_ids = VCR.use_cassette("top_stories") do
+ scraper.fetch_top_story_ids
+ end
+
+ # this cassette was manually trimmed for length
+ expect(story_ids).to match_array [
+ 41119080,
+ 41120254,
+ 41119443
+ ]
+ end
+ end
+
+ describe "#fetch_story_details" do
+ it "should retrieve a specific story's meta data" do
+ story_details = VCR.use_cassette("story_details_41119080") do
+ scraper.fetch_story_details(41119080)
+ end
+
+ expect(story_details).to include(
+ by: "Brajeshwar",
+ descendants: 52,
+ id: 41119080,
+ kids: [41120013, 41122192, 41121903, 41119817, 41120734, 41121026, 41119579, 41119896, 41119789, 41120995, 41119829],
+ score: 125,
+ time: 1722433249,
+ title: "How great was the Great Oxidation Event?",
+ type: "story",
+ url: "https://eos.org/science-updates/how-great-was-the-great-oxidation-event"
+ )
+ end
+
+ it "should be able to interpret a binary response body" do
+ story_details = VCR.use_cassette("story_details_41119443") do
+ scraper.fetch_story_details(41119443)
+ end
+
+ # In order to trim this cassette, I had to set a VCR config option:
+ # req.response.body.force_encoding("UTF-8").
+ # But this endpoint definitely returns a binary string response
+ expect(story_details).to include(
+ by: "darweenist",
+ descendants: 59,
+ id: 41119443,
+ kids: [41122773, 41123293, 41122675],
+ score: 77,
+ text: "Hey HN! Dawson here from Martin (https://www.trymartin.com). Martin is a better Siri with an LLM brain and deeper integrations with everyday apps.",
+ time: 1722435867,
+ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri",
+ type: "story"
+ )
+ end
+ end
+
+ describe "#fetch_stories" do
+ it "should retrieve multiple stories' meta data" do
+ multi_story_details = VCR.use_cassette("story_details_multi") do
+ scraper.fetch_stories([
+ 41119080,
+ 41120254,
+ 41119443
+ ])
+ end
+
+ # this cassette was manually trimmed for length
+ expect(multi_story_details).to match_array [
+ {
+ by: "Brajeshwar",
+ descendants: 63,
+ id: 41119080,
+ kids: [41122732, 41119817, 41121903],
+ score: 143,
+ time: 1722433249,
+ title: "How great was the Great Oxidation Event?",
+ type: "story",
+ url: "https://eos.org/science-updates/how-great-was-the-great-oxidation-event"
+ },
+ {
+ by: "BerislavLopac",
+ descendants: 236,
+ id: 41120254,
+ kids: [41123233, 41121444, 41123268],
+ score: 212,
+ time: 1722440995,
+ title: "I prefer rST to Markdown",
+ type: "story",
+ url: "https://buttondown.email/hillelwayne/archive/why-i-prefer-rst-to-markdown/"
+ },
+ {
+ by: "darweenist",
+ descendants: 53,
+ id: 41119443,
+ kids: [41122773, 41122675, 41121790],
+ score: 73,
+ text: "Hey HN! Dawson here from Martin (https://www.trymartin.com). Martin is a better Siri with an LLM brain and deeper integrations with everyday apps.",
+ time: 1722435867,
+ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri",
+ type: "story"
+ }
+ ]
+ end
+ end
+
+ describe ".retrieve_top_stories" do
+ it "should do an end to end refresh of top story data" do
+ all_top_stories = VCR.use_cassette("top_story_full_refresh") do
+ described_class.retrieve_top_stories(
+ cache_expiry: 0.seconds,
+ relevant_fields: [:title]
+ )
+ end
+
+ # this cassette was HEAVILY trimmed for length
+ expect(all_top_stories).to match_array [
+ {
+ title: "Suspicious data pattern in recent Venezuelan election"
+ },
+ {
+ title: "How great was the Great Oxidation Event?"
+ },
+ {
+ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri"
+ }
+ ]
+ end
+
+ it "can optionally return the top x results" do
+ some_top_stories = VCR.use_cassette("top_story_full_refresh") do
+ described_class.retrieve_top_stories(
+ limit: 2,
+ cache_expiry: 0.seconds,
+ relevant_fields: [:title]
+ )
+ end
+
+ # this cassette was HEAVILY trimmed for length
+ expect(some_top_stories).to match_array [
+ {
+ title: "Suspicious data pattern in recent Venezuelan election"
+ },
+ {
+ title: "How great was the Great Oxidation Event?"
+ }
+ ]
+ end
+ end
+end
diff --git a/spec/repos/like_repo_spec.rb b/spec/repos/like_repo_spec.rb
new file mode 100644
index 00000000..018542c6
--- /dev/null
+++ b/spec/repos/like_repo_spec.rb
@@ -0,0 +1,120 @@
+require "rails_helper"
+
+describe LikeRepo do
+ let(:user_attrs) do
+ {first_name: :foo, last_name: :bar, email: "f@b.c", password: "foobar123"}
+ end
+
+ let(:user_a) {
+ User.create(
+ **user_attrs, email: "a@example.com", first_name: "user", last_name: "ayy"
+ )
+ }
+ let(:user_b) {
+ User.create(
+ **user_attrs, email: "b@example.com", first_name: "user", last_name: "bee"
+ )
+ }
+ let(:user_c) {
+ User.create(
+ **user_attrs, email: "c@example.com", first_name: "user", last_name: "see"
+ )
+ }
+
+ let(:repo) { described_class.new(user_a.id) }
+
+ describe "#toggle_like" do
+ let(:story_id) { 999 }
+
+ it "should flip from liked to unliked and back on subsequent calls" do
+ expect(described_class.fetch_likes(story_id)).to match_array []
+
+ repo.toggle_like(story_id)
+ expect(described_class.fetch_likes(story_id)).to match_array [
+ {
+ user_id: user_a.id,
+ name: "user ayy",
+ story_id: story_id
+ }
+ ]
+
+ repo.toggle_like(story_id)
+ expect(described_class.fetch_likes(story_id)).to match_array []
+
+ repo.toggle_like(story_id)
+ expect(described_class.fetch_likes(story_id)).to match_array [
+ {
+ user_id: user_a.id,
+ name: "user ayy",
+ story_id: story_id
+ }
+ ]
+ end
+
+ context "with multiple likers" do
+ it "should show multiple names" do
+ LikeRepo.new(user_a.id).toggle_like(story_id)
+ LikeRepo.new(user_b.id).toggle_like(story_id)
+
+ expect(LikeRepo.fetch_likes(story_id)).to match_array [
+ {
+ user_id: user_a.id,
+ name: "user ayy",
+ story_id: story_id
+ },
+ {
+ user_id: user_b.id,
+ name: "user bee",
+ story_id: story_id
+ }
+ ]
+ end
+ end
+ end
+
+ describe ".fetch_grouped_likes" do
+ it "should return a hash keyed by story_id" do
+ LikeRepo.new(user_a.id)
+ .toggle_like(100)
+ .toggle_like(200)
+
+ LikeRepo.new(user_b.id)
+ .toggle_like(200)
+ .toggle_like(300)
+
+ # shouldn't be found
+ LikeRepo.new(user_c.id)
+ .toggle_like(500)
+
+ expect(LikeRepo.fetch_grouped_likes(
+ [100, 200, 300, 400]
+ )).to include(
+ 100 => "user ayy",
+ 200 => "user ayy, user bee",
+ 300 => "user bee"
+ )
+ end
+
+ it "will return all likes if not given any story_ids" do
+ LikeRepo.new(user_a.id)
+ .toggle_like(100)
+ .toggle_like(200)
+
+ LikeRepo.new(user_b.id)
+ .toggle_like(200)
+ .toggle_like(300)
+
+ # should be found
+ LikeRepo.new(user_c.id)
+ .toggle_like(500)
+
+ expect(LikeRepo.fetch_grouped_likes)
+ .to include(
+ 100 => "user ayy",
+ 200 => "user bee, user ayy",
+ 300 => "user bee",
+ 500 => "user see"
+ )
+ end
+ end
+end
From abc599e0cf9ce2bf71d3732c5651396c78a007b3 Mon Sep 17 00:00:00 2001
From: densetsu9 <78060710+densetsu9@users.noreply.github.com>
Date: Fri, 2 Aug 2024 15:02:49 -0400
Subject: [PATCH 2/2] whoops, removed patch
---
denzil.patch | 1686 --------------------------------------------------
1 file changed, 1686 deletions(-)
delete mode 100644 denzil.patch
diff --git a/denzil.patch b/denzil.patch
deleted file mode 100644
index b3668cb8..00000000
--- a/denzil.patch
+++ /dev/null
@@ -1,1686 +0,0 @@
-diff --git a/.gitignore b/.gitignore
-index 82701fe..f38d4ed 100644
---- a/.gitignore
-+++ b/.gitignore
-@@ -17,3 +17,4 @@
- /yarn-error.log
-
- .byebug_history
-+.vscode
-diff --git a/DENZIL.md b/DENZIL.md
-new file mode 100644
-index 0000000..8019b37
---- /dev/null
-+++ b/DENZIL.md
-@@ -0,0 +1,62 @@
-+# Denzil Kriekenbeek take home exercise
-+
-+## Installation problems on Apple M1 Mac Book Air, Sonoma 14.4.1 (23E224)
-+
-+### nio4r gem failing bundle install:
-+ ```
-+ gem install nio4r -v 2.5.8 -- --with-cflags="-Wno-incompatible-pointer-types"
-+ ```
-+
-+### Postgres gem install failing bundle install due to Postgres not being installed:
-+ ```
-+ brew install postgresql
-+ brew services start postgresql@14
-+ ```
-+
-+### Wrong version of OpenSSL being used when building Ruby 3.1.2 with ruby-install
-+ Add to .zshrc
-+ ```
-+ export PATH="/opt/homebrew/opt/openssl@1.1/bin:$PATH"
-+ export LIBRARY_PATH="$LIBRARY_PATH:/opt/homebrew/opt/openssl@1.1/lib/"
-+ export RUBY_CONFIGURE_OPTS="--with-openssl-dir=$(brew --prefix openssl@1.1)"
-+ ```
-+ `ruby-install ruby-3.1.2`
-+
-+## Initial Impressions:
-+- The Hacker News API requires N+1 requests to populate a page, we'll have to do some significant caching to make this tolerable.
-+
-+- 1st step would be to refresh my memory by reading
-+https://guides.rubyonrails.org/caching_with_rails.html
-+
-+- Might need to enable development (in memory) cache with bin/rails dev:cache, but this would mean production would need an alternate memory store (memcached?). No need to decide now.
-+
-+## Random thought livestream:
-+
-+- Any time an API is involved, I reach for the VCR gem; it mocks out external API calls, allowing for deterministic unit tests. Incidentally, while reading the docs, noticed that the supported Typhoeus library can handle parallel requests. Seems applicable to this problem.
-+
-+- There's a nagging deprecation warning that seems easily fixable.
-+
-+- Login/logout is the first requirement, and I see the devise gem in the gemfile, so let's get that working next.
-+
-+- Heh, didn't realize the User table already had all the devise columns until I went to create a migration. *facepalm* As an aside, I was going to add the annotate gem for easy schema reference.
-+
-+- Now that we can guarantee that users are logged in, the next step is to retrieve Hacker News entries via its API. Will brute force the N+1 request first, then iterate from there.
-+
-+- My plan is to create a "repository" to abstract away all this API work. But in good TDD practice, I'll start by writing a failing test that lets me design my interface.
-+
-+- Hmm, the API has an "ask" item that is unhelpfully tagged also typed as a "story". The only difference I see is that an "ask" has a text field, where a real "story" does not. But I suppose for this exercise we only care about titles.
-+
-+- Whoops neglected this file: Got my brute force scraper working. Piped that output to home page. Added a like button to each row... Next step is to make it do something, which involves creating table for this data to live in.
-+
-+- I added low level cacheing for the scrape results. The likes would have to be dynamic, so there would have to be some collation of the data sets. Hence the introduction of my collator classes. I realized that the home page's cache expiration would have to be on the order of minutes, whereas each individual story details cache could live for days. As a result, every piece of information should only be loaded once, keeping our bandwidth low at the expense of some cache space.
-+
-+- I was considering doing some partial render caching as well, but I also wanted to submit it before EOW :)
-+
-+- I'm also glad to have included the typhoeus gem to do parallel fetches, which should prevent a heavy waterfall on initial page load.
-+
-+
-+## Final Thoughts:
-+- This was a really fun exercise! I haven't used Rails 7 before, so I took the opportunity to acquaint myself with how Stimulus worked. I'm happy with the resulting "SPA"-like experience. I think you'll see that I'm very test driven, and I like to build facades of abstraction that make making tweaks later easier. After building all the tools I needed for the home page, I was able to build the liked page in a few minutes. Thank you, and I hope to hear from you soon!
-+
-+Sincerely,
-+-Denzil
-diff --git a/Gemfile b/Gemfile
-index 5a8ffc4..2eba0d2 100644
---- a/Gemfile
-+++ b/Gemfile
-@@ -2,6 +2,7 @@ source 'https://rubygems.org'
-
- ruby File.read('.ruby-version').chomp
-
-+gem 'annotate', group: :development # reminds us of model schemas
- gem 'byebug', platforms: [:mri, :mingw, :x64_mingw], group: [:development, :test]
- gem 'capybara', group: [:development, :test]
- gem 'coffee-rails'
-@@ -17,6 +18,8 @@ gem 'sass-rails'
- gem 'selenium-webdriver', group: [:development, :test]
- gem 'spring', group: :development
- gem 'turbolinks'
-+gem 'typhoeus' # parallelizes http requests
- gem 'tzinfo-data', platforms: [:mingw, :mswin, :x64_mingw, :jruby]
- gem 'uglifier'
-+gem "vcr", group: :test # mocks http requests
- gem 'web-console', group: :development
-diff --git a/Gemfile.lock b/Gemfile.lock
-index 14ec645..72789b6 100644
---- a/Gemfile.lock
-+++ b/Gemfile.lock
-@@ -68,6 +68,9 @@ GEM
- tzinfo (~> 2.0)
- addressable (2.8.1)
- public_suffix (>= 2.0.2, < 6.0)
-+ annotate (3.2.0)
-+ activerecord (>= 3.2, < 8.0)
-+ rake (>= 10.4, < 14.0)
- bcrypt (3.1.18)
- bindex (0.8.1)
- builder (3.2.4)
-@@ -101,6 +104,8 @@ GEM
- diff-lcs (1.5.0)
- digest (3.1.0)
- erubi (1.11.0)
-+ ethon (0.16.0)
-+ ffi (>= 1.15.0)
- execjs (2.8.1)
- ffi (1.15.5)
- globalid (1.0.0)
-@@ -239,10 +244,13 @@ GEM
- turbolinks (5.2.1)
- turbolinks-source (~> 5.2)
- turbolinks-source (5.2.0)
-+ typhoeus (1.4.1)
-+ ethon (>= 0.9.0)
- tzinfo (2.0.5)
- concurrent-ruby (~> 1.0)
- uglifier (4.2.0)
- execjs (>= 0.3.0, < 3)
-+ vcr (6.2.0)
- warden (1.2.9)
- rack (>= 2.0.9)
- web-console (4.2.0)
-@@ -262,6 +270,7 @@ PLATFORMS
- ruby
-
- DEPENDENCIES
-+ annotate
- byebug
- capybara
- coffee-rails
-@@ -277,8 +286,10 @@ DEPENDENCIES
- selenium-webdriver
- spring
- turbolinks
-+ typhoeus
- tzinfo-data
- uglifier
-+ vcr
- web-console
-
- RUBY VERSION
-diff --git a/app/assets/javascripts/likes.js b/app/assets/javascripts/likes.js
-new file mode 100644
-index 0000000..19cd140
---- /dev/null
-+++ b/app/assets/javascripts/likes.js
-@@ -0,0 +1,37 @@
-+
-+class Likes {
-+ constructor() {
-+ document.addEventListener("ajax:success", this.ajax_listener);
-+ }
-+
-+ ajax_listener = (event) => {
-+ const [data, _status, _xhr] = event.detail;
-+ const { cmd, ...json } = data;
-+
-+ switch (data.cmd) {
-+ case "update_story_likes":
-+ const { story_id, likers } = json;
-+ return this.update_story_likes(story_id, likers);
-+ }
-+ }
-+
-+ update_story_likes(story_id, likers) {
-+ const storyLikeElementId = "story_likes_" + story_id;
-+
-+ let newContents = "";
-+ if (likers.length > 0) {
-+ newContents = "Liked by: "
-+ newContents += likers
-+ .map(liker => liker.name)
-+ .join(", ")
-+ }
-+
-+ this.replace_element(storyLikeElementId, newContents);
-+ }
-+
-+ replace_element(elementId, newContents) {
-+ document.getElementById(elementId).innerHTML = newContents;
-+ }
-+}
-+
-+new Likes();
-diff --git a/app/controllers/pages_controller.rb b/app/controllers/pages_controller.rb
-index ce3bf58..0cf8572 100644
---- a/app/controllers/pages_controller.rb
-+++ b/app/controllers/pages_controller.rb
-@@ -1,2 +1,28 @@
- class PagesController < ApplicationController
-+ before_action :authenticate_user!
-+
-+ MAX_STORIES = 20
-+
-+ def home
-+ home_page_data = HomePageCollator.call(limit: MAX_STORIES)
-+ render locals: home_page_data
-+ end
-+
-+ def liked_index
-+ liked_page_data = LikedPageCollator.call
-+ render locals: liked_page_data
-+ end
-+
-+ def like_story
-+ story_id = params.require(:story_id)
-+ LikeRepo.new(current_user.id)
-+ .toggle_like(story_id)
-+
-+ likers = LikeRepo.fetch_likes(story_id)
-+ render json: {
-+ cmd: :update_story_likes,
-+ story_id: story_id,
-+ likers: likers
-+ }
-+ end
- end
-diff --git a/app/models/like.rb b/app/models/like.rb
-new file mode 100644
-index 0000000..8ae5d3b
---- /dev/null
-+++ b/app/models/like.rb
-@@ -0,0 +1,23 @@
-+# == Schema Information
-+#
-+# Table name: likes
-+#
-+# id :bigint not null, primary key
-+# active :boolean default(FALSE)
-+# created_at :datetime not null
-+# updated_at :datetime not null
-+# story_id :integer
-+# user_id :bigint
-+#
-+# Indexes
-+#
-+# index_likes_on_user_id (user_id)
-+# index_likes_on_user_id_and_story_id (user_id,story_id) UNIQUE
-+#
-+class Like < ApplicationRecord
-+ belongs_to :user
-+
-+ def user_name
-+ user.full_name
-+ end
-+end
-diff --git a/app/models/user.rb b/app/models/user.rb
-index b2091f9..707177d 100644
---- a/app/models/user.rb
-+++ b/app/models/user.rb
-@@ -1,6 +1,35 @@
-+# == Schema Information
-+#
-+# Table name: users
-+#
-+# id :bigint not null, primary key
-+# current_sign_in_at :datetime
-+# current_sign_in_ip :inet
-+# email :string default(""), not null
-+# encrypted_password :string default(""), not null
-+# first_name :string
-+# last_name :string
-+# last_sign_in_at :datetime
-+# last_sign_in_ip :inet
-+# remember_created_at :datetime
-+# reset_password_sent_at :datetime
-+# reset_password_token :string
-+# sign_in_count :integer default(0), not null
-+# created_at :datetime not null
-+# updated_at :datetime not null
-+#
-+# Indexes
-+#
-+# index_users_on_email (email) UNIQUE
-+# index_users_on_reset_password_token (reset_password_token) UNIQUE
-+#
- class User < ApplicationRecord
- # Include default devise modules. Others available are:
- # :confirmable, :lockable, :timeoutable and :omniauthable
- devise :database_authenticatable, :registerable,
-- :recoverable, :rememberable, :trackable, :validatable
-+ :recoverable, :rememberable, :trackable, :validatable
-+
-+ def full_name
-+ "#{first_name} #{last_name}"
-+ end
- end
-diff --git a/app/repos/hacker_news_scraper.rb b/app/repos/hacker_news_scraper.rb
-new file mode 100644
-index 0000000..88102ae
---- /dev/null
-+++ b/app/repos/hacker_news_scraper.rb
-@@ -0,0 +1,81 @@
-+class HackerNewsScraper
-+ API_ROOT = "https://hacker-news.firebaseio.com/v0/"
-+
-+ def self.retrieve_top_stories(
-+ limit: nil,
-+ cache_expiry: 3.minutes,
-+ relevant_fields: []
-+ )
-+ Rails.cache.fetch(
-+ :hacker_news_top_stories,
-+ expires_in: cache_expiry
-+ ) do
-+ scraper = new
-+ story_ids = scraper.fetch_top_story_ids
-+ limit ||= story_ids.size
-+
-+ scraper
-+ .fetch_stories(story_ids.first(limit))
-+ .map do |story_details|
-+ story_details.slice(*relevant_fields)
-+ end
-+ end
-+ end
-+
-+ def self.retrieve_story_details(
-+ story_id:,
-+ cache_expiry: 1.day,
-+ relevant_fields: []
-+ )
-+ Rails.cache.fetch(
-+ [:story_details, story_id],
-+ expires_in: cache_expiry
-+ ) do
-+ story_details = new.fetch_story_details(story_id)
-+ story_details.slice(*relevant_fields)
-+ end
-+ end
-+
-+ def fetch_top_story_ids
-+ end_point = "topstories.json"
-+ get(end_point)
-+ end
-+
-+ def fetch_story_details(story_id)
-+ end_point = story_endpoint(story_id)
-+ get(end_point)
-+ .symbolize_keys
-+ end
-+
-+ def fetch_stories(story_ids)
-+ hydra = Typhoeus::Hydra.new
-+ requests = build_hydra_requests(story_ids) do |request|
-+ hydra.queue(request)
-+ end
-+ hydra.run
-+
-+ requests.map do |request|
-+ JSON.parse(request.response.body)
-+ .symbolize_keys
-+ end
-+ end
-+
-+ private
-+
-+ def get(api_endpoint)
-+ request = Typhoeus.get(API_ROOT + api_endpoint)
-+ JSON.parse(request.response_body)
-+ end
-+
-+ def story_endpoint(story_id)
-+ "item/#{story_id}.json"
-+ end
-+
-+ def build_hydra_requests(story_ids, &block)
-+ story_ids.map do |story_id|
-+ api_end_point = API_ROOT + story_endpoint(story_id)
-+ Typhoeus::Request.new(api_end_point)
-+ .tap { |req| block.call(req) }
-+ end
-+ end
-+end
-diff --git a/app/repos/home_page_collator.rb b/app/repos/home_page_collator.rb
-new file mode 100644
-index 0000000..2b39989
---- /dev/null
-+++ b/app/repos/home_page_collator.rb
-@@ -0,0 +1,52 @@
-+class HomePageCollator
-+ def self.call(limit: nil, cache_expiry: nil)
-+ repo = new
-+ repo.limit = limit if limit.present?
-+ repo.cache_expiry = cache_expiry if cache_expiry.present?
-+ repo.execute
-+ end
-+
-+ attr_accessor :limit, :cache_expiry
-+
-+ def initialize
-+ @scraper = HackerNewsScraper
-+ @like_repo = LikeRepo
-+ @limit = nil
-+ @cache_expiry = 3.minutes # top stories will be in constant flux
-+ end
-+
-+ def execute
-+ story_data = scrape_news_data
-+ story_ids = extract_story_ids(story_data)
-+ liker_data = lookup_likes(story_ids)
-+
-+ {
-+ story_data: story_data,
-+ liker_data: liker_data
-+ }
-+ end
-+
-+ private
-+
-+ attr_reader :scraper, :like_repo
-+
-+ def scrape_news_data
-+ scraper.retrieve_top_stories(
-+ limit: limit,
-+ cache_expiry: cache_expiry,
-+ relevant_fields: [
-+ :id,
-+ :title,
-+ :url
-+ ]
-+ )
-+ end
-+
-+ def extract_story_ids(scraped_data)
-+ scraped_data.map { |story| story[:id] }
-+ end
-+
-+ def lookup_likes(story_ids)
-+ like_repo.fetch_grouped_likes(story_ids)
-+ end
-+end
-diff --git a/app/repos/like_repo.rb b/app/repos/like_repo.rb
-new file mode 100644
-index 0000000..f5a49b6
---- /dev/null
-+++ b/app/repos/like_repo.rb
-@@ -0,0 +1,62 @@
-+class LikeRepo
-+ def initialize(user_id)
-+ @user_id = user_id
-+ end
-+
-+ def toggle_like(story_id)
-+ like = Like.find_or_create_by(
-+ story_id: story_id,
-+ user_id: user_id
-+ )
-+ like.toggle!(:active)
-+ self
-+ end
-+
-+ def self.fetch_grouped_likes(story_ids = [])
-+ likes = story_ids.blank? ?
-+ fetch_all_likes :
-+ fetch_likes(story_ids)
-+
-+ likes
-+ .group_by { |like| like[:story_id] }
-+ .transform_values do |likes|
-+ likes
-+ .map { |like| like[:name] }
-+ .join(", ")
-+ end
-+ end
-+
-+ def self.fetch_likes(story_id)
-+ likes = Like
-+ .includes(:user)
-+ .where(
-+ story_id: story_id,
-+ active: true
-+ )
-+
-+ likes.map { |like| format_like(like) }
-+ end
-+
-+ def self.fetch_all_likes
-+ likes = Like
-+ .includes(:user)
-+ .where(
-+ active: true
-+ )
-+ .order(id: :desc)
-+
-+ likes.map { |like| format_like(like) }
-+ end
-+
-+ def self.format_like(like)
-+ {
-+ story_id: like.story_id,
-+ user_id: like.user_id,
-+ name: like.user_name
-+ }
-+ end
-+
-+ private
-+
-+ attr_reader :user_id
-+end
-diff --git a/app/repos/liked_page_collator.rb b/app/repos/liked_page_collator.rb
-new file mode 100644
-index 0000000..9f2f1cf
---- /dev/null
-+++ b/app/repos/liked_page_collator.rb
-@@ -0,0 +1,48 @@
-+class LikedPageCollator
-+ def self.call(cache_expiry: nil)
-+ repo = new
-+ repo.cache_expiry = cache_expiry if cache_expiry.present?
-+ repo.execute
-+ end
-+
-+ attr_accessor :cache_expiry
-+
-+ def initialize
-+ @scraper = HackerNewsScraper
-+ @like_repo = LikeRepo
-+ @cache_expiry = 1.day # story details won't change often, if at all
-+ end
-+
-+ def execute
-+ liker_data = lookup_all_likes
-+ story_ids = liker_data.keys
-+ scraped_data = scrape_news_data(story_ids)
-+
-+ {
-+ story_data: scraped_data,
-+ liker_data: liker_data
-+ }
-+ end
-+
-+ private
-+
-+ attr_reader :scraper, :like_repo
-+
-+ def lookup_all_likes
-+ like_repo.fetch_grouped_likes
-+ end
-+
-+ def scrape_news_data(story_ids)
-+ story_ids.map do |story_id|
-+ scraper.retrieve_story_details(
-+ story_id: story_id,
-+ cache_expiry: cache_expiry,
-+ relevant_fields: [
-+ :id,
-+ :title,
-+ :url
-+ ]
-+ )
-+ end
-+ end
-+end
-diff --git a/app/views/pages/_header.html.erb b/app/views/pages/_header.html.erb
-new file mode 100644
-index 0000000..b1fff55
---- /dev/null
-+++ b/app/views/pages/_header.html.erb
-@@ -0,0 +1,17 @@
-+
-+
-+ <%= link_to "Top Stories", "/" %>
-+
-+
-+ <%= link_to "Liked Stories", "/liked" %>
-+
-+
-+ <%=
-+ button_to(
-+ "Sign out from #{current_user.email}",
-+ destroy_user_session_path,
-+ method: :delete
-+ )
-+ %>
-+
-+
-diff --git a/app/views/pages/_story_row.html.erb b/app/views/pages/_story_row.html.erb
-new file mode 100644
-index 0000000..eacde18
---- /dev/null
-+++ b/app/views/pages/_story_row.html.erb
-@@ -0,0 +1,16 @@
-+<% likers.prepend("Liked by: ") if likers.present? %>
-+
-+
-+ <%=
-+ button_to "👍",
-+ like_story_path(story_id: story[:id]),
-+ {
-+ method: :post,
-+ remote: true,
-+ style: "background-color:#44a; cursor: pointer;",
-+ }
-+ %>
-+
<%= story[:title] %>
-+
<%= likers %>
-+
-+
-diff --git a/app/views/pages/home.html.erb b/app/views/pages/home.html.erb
-index 8bfd829..bb984a0 100644
---- a/app/views/pages/home.html.erb
-+++ b/app/views/pages/home.html.erb
-@@ -1 +1,11 @@
-+<%= render "header" %>
- Welcome to Top News
-+
-+<% story_data.each do |story_details| %>
-+ <%= render(
-+ "story_row",
-+ story: story_details,
-+ likers: liker_data[story_details[:id]]
-+ )%>
-+<% end %>
-+
-diff --git a/app/views/pages/liked_index.html.erb b/app/views/pages/liked_index.html.erb
-new file mode 100644
-index 0000000..9435219
---- /dev/null
-+++ b/app/views/pages/liked_index.html.erb
-@@ -0,0 +1,11 @@
-+<%= render "header" %>
-+All Liked Stories
-+
-+<% story_data.each do |story_details| %>
-+ <%= render(
-+ "story_row",
-+ story: story_details,
-+ likers: liker_data[story_details[:id]]
-+ )%>
-+<% end %>
-+
-diff --git a/config/application.rb b/config/application.rb
-index dab4cec..f2b7546 100644
---- a/config/application.rb
-+++ b/config/application.rb
-@@ -10,6 +10,7 @@ module Topnews
- class Application < Rails::Application
- # Initialize configuration defaults for originally generated Rails version.
- config.load_defaults 5.1
-+ config.active_record.legacy_connection_handling = false
-
- # Settings in config/environments/* take precedence over those specified here.
- # Application configuration should go into files in config/initializers
-diff --git a/config/environments/test.rb b/config/environments/test.rb
-index 8e5cbde..7b6dc4c 100644
---- a/config/environments/test.rb
-+++ b/config/environments/test.rb
-@@ -5,7 +5,7 @@ Rails.application.configure do
- # test suite. You never need to work with it otherwise. Remember that
- # your test database is "scratch space" for the test suite and is wiped
- # and recreated between test runs. Don't rely on the data there!
-- config.cache_classes = true
-+ config.cache_classes = false
-
- # Do not eager load code on boot. This avoids loading your whole application
- # just for the purpose of running a single test. If you are using a tool that
-diff --git a/config/initializers/devise.rb b/config/initializers/devise.rb
-index 962d4a7..235093e 100644
---- a/config/initializers/devise.rb
-+++ b/config/initializers/devise.rb
-@@ -244,7 +244,7 @@ Devise.setup do |config|
- # should add them to the navigational formats lists.
- #
- # The "*/*" below is required to match Internet Explorer requests.
-- # config.navigational_formats = ['*/*', :html]
-+ config.navigational_formats = ['*/*', :html, :turbo_stream]
-
- # The default HTTP method used to sign out a resource. Default is :delete.
- config.sign_out_via = :delete
-diff --git a/config/routes.rb b/config/routes.rb
-index c12ef08..5163a68 100644
---- a/config/routes.rb
-+++ b/config/routes.rb
-@@ -1,4 +1,7 @@
- Rails.application.routes.draw do
- devise_for :users
-- root to: 'pages#home'
-+ root to: "pages#home"
-+
-+ get "liked", to: "pages#liked_index", as: :liked_index
-+ post "like/:story_id", to: "pages#like_story", as: :like_story
- end
-diff --git a/db/migrate/20240801001753_create_like_table.rb b/db/migrate/20240801001753_create_like_table.rb
-new file mode 100644
-index 0000000..3f99c6f
---- /dev/null
-+++ b/db/migrate/20240801001753_create_like_table.rb
-@@ -0,0 +1,11 @@
-+class CreateLikeTable < ActiveRecord::Migration[7.0]
-+ def change
-+ create_table :likes do |t|
-+ t.references :user
-+ t.integer :story_id
-+ t.index [:user_id, :story_id], unique: true
-+ t.boolean :active, default: false
-+ t.timestamps
-+ end
-+ end
-+end
-diff --git a/db/schema.rb b/db/schema.rb
-index acc34f3..3ec6005 100644
---- a/db/schema.rb
-+++ b/db/schema.rb
-@@ -10,10 +10,20 @@
- #
- # It's strongly recommended that you check this file into your version control system.
-
--ActiveRecord::Schema[7.0].define(version: 2018_02_28_212101) do
-+ActiveRecord::Schema[7.0].define(version: 2024_08_01_001753) do
- # These are extensions that must be enabled in order to support this database
- enable_extension "plpgsql"
-
-+ create_table "likes", force: :cascade do |t|
-+ t.bigint "user_id"
-+ t.integer "story_id"
-+ t.boolean "active", default: false
-+ t.datetime "created_at", null: false
-+ t.datetime "updated_at", null: false
-+ t.index ["user_id", "story_id"], name: "index_likes_on_user_id_and_story_id", unique: true
-+ t.index ["user_id"], name: "index_likes_on_user_id"
-+ end
-+
- create_table "users", force: :cascade do |t|
- t.string "first_name"
- t.string "last_name"
-diff --git a/fixtures/vcr_cassettes/story_details_41119080.yml b/fixtures/vcr_cassettes/story_details_41119080.yml
-new file mode 100644
-index 0000000..1168103
---- /dev/null
-+++ b/fixtures/vcr_cassettes/story_details_41119080.yml
-@@ -0,0 +1,43 @@
-+---
-+http_interactions:
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 19:13:44 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '334'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: ASCII-8BIT
-+ string: '{"by":"Brajeshwar","descendants":52,"id":41119080,"kids":[41120013,41122192,41121903,41119817,41120734,41121026,41119579,41119896,41119789,41120995,41119829],"score":125,"time":1722433249,"title":"How
-+ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
-+ recorded_at: Wed, 31 Jul 2024 19:13:44 GMT
-+recorded_with: VCR 6.2.0
-diff --git a/fixtures/vcr_cassettes/story_details_41119443.yml b/fixtures/vcr_cassettes/story_details_41119443.yml
-new file mode 100644
-index 0000000..4a415fa
---- /dev/null
-+++ b/fixtures/vcr_cassettes/story_details_41119443.yml
-@@ -0,0 +1,46 @@
-+---
-+http_interactions:
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 21:10:50 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '5639'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"darweenist","descendants":59,"id":41119443,"kids":[41122773,41123293,41122675],"score":77,"text":"Hey
-+ HN! Dawson here from Martin (https://www.trymartin.com).
-+ Martin is a better Siri with an LLM brain and deeper integrations with everyday
-+ apps.","time":1722435867,"title":"Launch HN: Martin
-+ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
-+ recorded_at: Wed, 31 Jul 2024 21:10:50 GMT
-+recorded_with: VCR 6.2.0
-diff --git a/fixtures/vcr_cassettes/story_details_multi.yml b/fixtures/vcr_cassettes/story_details_multi.yml
-new file mode 100644
-index 0000000..6005d5a
---- /dev/null
-+++ b/fixtures/vcr_cassettes/story_details_multi.yml
-@@ -0,0 +1,126 @@
-+---
-+http_interactions:
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 20:48:03 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '343'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"Brajeshwar","descendants":63,"id":41119080,"kids":[41122732,41119817,41121903],"score":143,"time":1722433249,"title":"How
-+ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
-+ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41120254.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 20:48:03 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '910'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"BerislavLopac","descendants":236,"id":41120254,"kids":[41123233,41121444,41123268],"score":212,"time":1722440995,"title":"I
-+ prefer rST to Markdown","type":"story","url":"https://buttondown.email/hillelwayne/archive/why-i-prefer-rst-to-markdown/"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41120254.json
-+ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 20:48:03 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '5612'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"darweenist","descendants":53,"id":41119443,"kids":[41122773,41122675,41121790],"score":73,"text":"Hey
-+ HN! Dawson here from Martin (https://www.trymartin.com).
-+ Martin is a better Siri with an LLM brain and deeper integrations with everyday
-+ apps.","time":1722435867,"title":"Launch HN: Martin
-+ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
-+ recorded_at: Wed, 31 Jul 2024 20:48:03 GMT
-+recorded_with: VCR 6.2.0
-diff --git a/fixtures/vcr_cassettes/top_stories.yml b/fixtures/vcr_cassettes/top_stories.yml
-new file mode 100644
-index 0000000..a433377
---- /dev/null
-+++ b/fixtures/vcr_cassettes/top_stories.yml
-@@ -0,0 +1,42 @@
-+---
-+http_interactions:
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/topstories.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 18:58:34 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '4501'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: ASCII-8BIT
-+ string: "[41119080,41120254,41119443]"
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/topstories.json
-+ recorded_at: Wed, 31 Jul 2024 18:58:34 GMT
-+recorded_with: VCR 6.2.0
-diff --git a/fixtures/vcr_cassettes/top_story_full_refresh.yml b/fixtures/vcr_cassettes/top_story_full_refresh.yml
-new file mode 100644
-index 0000000..3a5466d
---- /dev/null
-+++ b/fixtures/vcr_cassettes/top_story_full_refresh.yml
-@@ -0,0 +1,164 @@
-+---
-+http_interactions:
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/topstories.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 22:02:12 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '4501'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: "[41123155,41119080,41119443]"
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/topstories.json
-+ recorded_at: Wed, 31 Jul 2024 22:02:12 GMT
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41123155.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 22:02:13 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '483'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"kgwgk","descendants":82,"id":41123155,"kids":[41123621,41123650,41123924],"score":227,"time":1722458071,"title":"Suspicious
-+ data pattern in recent Venezuelan election","type":"story","url":"https://statmodeling.stat.columbia.edu/2024/07/31/suspicious-data-pattern-in-recent-venezuelan-election/"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41123155.json
-+ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41119080.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 22:02:13 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '352'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"Brajeshwar","descendants":63,"id":41119080,"kids":[41122732,41119817,41121903],"score":156,"time":1722433249,"title":"How
-+ great was the Great Oxidation Event?","type":"story","url":"https://eos.org/science-updates/how-great-was-the-great-oxidation-event"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119080.json
-+ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
-+- request:
-+ method: get
-+ uri: https://hacker-news.firebaseio.com/v0/item/41119443.json
-+ body:
-+ encoding: US-ASCII
-+ string: ''
-+ headers:
-+ User-Agent:
-+ - Typhoeus - https://github.com/typhoeus/typhoeus
-+ Expect:
-+ - ''
-+ response:
-+ status:
-+ code: 200
-+ message: OK
-+ headers:
-+ Server:
-+ - nginx
-+ Date:
-+ - Wed, 31 Jul 2024 22:02:13 GMT
-+ Content-Type:
-+ - application/json; charset=utf-8
-+ Content-Length:
-+ - '5657'
-+ Connection:
-+ - keep-alive
-+ Access-Control-Allow-Origin:
-+ - "*"
-+ Cache-Control:
-+ - no-cache
-+ Strict-Transport-Security:
-+ - max-age=31556926; includeSubDomains; preload
-+ body:
-+ encoding: UTF-8
-+ string: '{"by":"darweenist","descendants":64,"id":41119443,"kids":[41122773,41121790,41123293],"score":86,"text":"Hey
-+ HN! Dawson here from Martin (https://www.trymartin.com).
-+ Martin is a better Siri with an LLM brain and deeper integrations with everyday
-+ apps.","time":1722435867,"title":"Launch HN: Martin
-+ (YC S23) – Using LLMs to Make a Better Siri","type":"story"}'
-+ http_version: '1.1'
-+ adapter_metadata:
-+ effective_url: https://hacker-news.firebaseio.com/v0/item/41119443.json
-+ recorded_at: Wed, 31 Jul 2024 22:02:13 GMT
-diff --git a/lib/tasks/auto_annotate_models.rake b/lib/tasks/auto_annotate_models.rake
-new file mode 100644
-index 0000000..e96283e
---- /dev/null
-+++ b/lib/tasks/auto_annotate_models.rake
-@@ -0,0 +1,59 @@
-+# NOTE: only doing this in development as some production environments (Heroku)
-+# NOTE: are sensitive to local FS writes, and besides -- it's just not proper
-+# NOTE: to have a dev-mode tool do its thing in production.
-+if Rails.env.development?
-+ require 'annotate'
-+ task :set_annotation_options do
-+ # You can override any of these by setting an environment variable of the
-+ # same name.
-+ Annotate.set_defaults(
-+ 'active_admin' => 'false',
-+ 'additional_file_patterns' => [],
-+ 'routes' => 'false',
-+ 'models' => 'true',
-+ 'position_in_routes' => 'before',
-+ 'position_in_class' => 'before',
-+ 'position_in_test' => 'before',
-+ 'position_in_fixture' => 'before',
-+ 'position_in_factory' => 'before',
-+ 'position_in_serializer' => 'before',
-+ 'show_foreign_keys' => 'true',
-+ 'show_complete_foreign_keys' => 'false',
-+ 'show_indexes' => 'true',
-+ 'simple_indexes' => 'false',
-+ 'model_dir' => 'app/models',
-+ 'root_dir' => '',
-+ 'include_version' => 'false',
-+ 'require' => '',
-+ 'exclude_tests' => 'false',
-+ 'exclude_fixtures' => 'false',
-+ 'exclude_factories' => 'false',
-+ 'exclude_serializers' => 'false',
-+ 'exclude_scaffolds' => 'true',
-+ 'exclude_controllers' => 'true',
-+ 'exclude_helpers' => 'true',
-+ 'exclude_sti_subclasses' => 'false',
-+ 'ignore_model_sub_dir' => 'false',
-+ 'ignore_columns' => nil,
-+ 'ignore_routes' => nil,
-+ 'ignore_unknown_models' => 'false',
-+ 'hide_limit_column_types' => 'integer,bigint,boolean',
-+ 'hide_default_column_types' => 'json,jsonb,hstore',
-+ 'skip_on_db_migrate' => 'false',
-+ 'format_bare' => 'true',
-+ 'format_rdoc' => 'false',
-+ 'format_yard' => 'false',
-+ 'format_markdown' => 'false',
-+ 'sort' => 'false',
-+ 'force' => 'false',
-+ 'frozen' => 'false',
-+ 'classified_sort' => 'true',
-+ 'trace' => 'false',
-+ 'wrapper_open' => nil,
-+ 'wrapper_close' => nil,
-+ 'with_comment' => 'true'
-+ )
-+ end
-+
-+ Annotate.load_tasks
-+end
-diff --git a/spec/models/like_spec.rb b/spec/models/like_spec.rb
-new file mode 100644
-index 0000000..bf8f25e
---- /dev/null
-+++ b/spec/models/like_spec.rb
-@@ -0,0 +1,31 @@
-+# == Schema Information
-+#
-+# Table name: likes
-+#
-+# id :bigint not null, primary key
-+# active :boolean default(FALSE)
-+# created_at :datetime not null
-+# updated_at :datetime not null
-+# story_id :integer
-+# user_id :bigint
-+#
-+# Indexes
-+#
-+# index_likes_on_user_id (user_id)
-+# index_likes_on_user_id_and_story_id (user_id,story_id) UNIQUE
-+#
-+require "rails_helper"
-+
-+describe Like do
-+ describe "#user_name" do
-+ let(:user) { User.new(first_name: "foo", last_name: "bar") }
-+
-+ it "should be constructed from the liker's first/last name" do
-+ like = Like.new({
-+ user: user,
-+ story_id: 9999
-+ })
-+ expect(like.user_name).to eq "foo bar"
-+ end
-+ end
-+end
-diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
-index b51dc1c..7505cab 100644
---- a/spec/models/user_spec.rb
-+++ b/spec/models/user_spec.rb
-@@ -1,17 +1,49 @@
--require 'rails_helper'
-+# == Schema Information
-+#
-+# Table name: users
-+#
-+# id :bigint not null, primary key
-+# current_sign_in_at :datetime
-+# current_sign_in_ip :inet
-+# email :string default(""), not null
-+# encrypted_password :string default(""), not null
-+# first_name :string
-+# last_name :string
-+# last_sign_in_at :datetime
-+# last_sign_in_ip :inet
-+# remember_created_at :datetime
-+# reset_password_sent_at :datetime
-+# reset_password_token :string
-+# sign_in_count :integer default(0), not null
-+# created_at :datetime not null
-+# updated_at :datetime not null
-+#
-+# Indexes
-+#
-+# index_users_on_email (email) UNIQUE
-+# index_users_on_reset_password_token (reset_password_token) UNIQUE
-+#
-+require "rails_helper"
-
- describe User do
-- context "creating a new user" do
-- let(:attrs) do
-- { first_name: :foo, last_name: :bar, email: 'f@b.c', password: 'foobar123' }
-- end
-+ let(:attrs) do
-+ {first_name: :foo, last_name: :bar, email: "f@b.c", password: "foobar123"}
-+ end
-
-+ context "creating a new user" do
- it "should have first, last, email" do
-- expect { User.create(attrs) }.to change{ User.count }.by(1)
-+ expect { User.create(attrs) }.to change { User.count }.by(1)
- end
-
- it "should require a password" do
- expect(User.new(attrs.except(:password))).to be_invalid
- end
- end
-+
-+ describe "#full_name" do
-+ it "should be constructed from first/last name" do
-+ full_name = User.new(attrs).full_name
-+ expect(full_name).to eq "foo bar"
-+ end
-+ end
- end
-diff --git a/spec/rails_helper.rb b/spec/rails_helper.rb
-index bbe1ba5..4758f39 100644
---- a/spec/rails_helper.rb
-+++ b/spec/rails_helper.rb
-@@ -1,10 +1,10 @@
- # This file is copied to spec/ when you run 'rails generate rspec:install'
--require 'spec_helper'
--ENV['RAILS_ENV'] ||= 'test'
--require File.expand_path('../../config/environment', __FILE__)
-+require "spec_helper"
-+ENV["RAILS_ENV"] ||= "test"
-+require File.expand_path("../../config/environment", __FILE__)
- # Prevent database truncation if the environment is production
- abort("The Rails environment is running in production mode!") if Rails.env.production?
--require 'rspec/rails'
-+require "rspec/rails"
- # Add additional requires below this line. Rails is not loaded until this point!
-
- # Requires supporting ruby files with custom matchers and macros, etc, in
-@@ -27,9 +27,6 @@ require 'rspec/rails'
- ActiveRecord::Migration.maintain_test_schema!
-
- RSpec.configure do |config|
-- # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
-- config.fixture_path = "#{::Rails.root}/spec/fixtures"
--
- # If you're not using ActiveRecord, or you'd prefer not to run each of your
- # examples within a transaction, remove the following line or assign false
- # instead of true.
-@@ -55,3 +52,18 @@ RSpec.configure do |config|
- # arbitrary gems may also be filtered via:
- # config.filter_gems_from_backtrace("gem name")
- end
-+
-+VCR.configure do |config|
-+ config.default_cassette_options = {
-+ record: :new_episodes,
-+ erb: true,
-+ decode_compressed_response: true
-+ }
-+ # will decompress binary responses before writing to cassettes
-+ config.before_record do |req|
-+ req.response.body.force_encoding("UTF-8")
-+ end
-+
-+ config.cassette_library_dir = "fixtures/vcr_cassettes"
-+ config.hook_into :typhoeus
-+end
-diff --git a/spec/repos/hacker_news_scraper_spec.rb b/spec/repos/hacker_news_scraper_spec.rb
-new file mode 100644
-index 0000000..cd9c8c9
---- /dev/null
-+++ b/spec/repos/hacker_news_scraper_spec.rb
-@@ -0,0 +1,154 @@
-+require "rails_helper"
-+
-+describe HackerNewsScraper do
-+ let(:scraper) { described_class.new }
-+
-+ describe "#fetch_top_story_ids" do
-+ it "should retrieve a json list of story ids" do
-+ story_ids = VCR.use_cassette("top_stories") do
-+ scraper.fetch_top_story_ids
-+ end
-+
-+ # this cassette was manually trimmed for length
-+ expect(story_ids).to match_array [
-+ 41119080,
-+ 41120254,
-+ 41119443
-+ ]
-+ end
-+ end
-+
-+ describe "#fetch_story_details" do
-+ it "should retrieve a specific story's meta data" do
-+ story_details = VCR.use_cassette("story_details_41119080") do
-+ scraper.fetch_story_details(41119080)
-+ end
-+
-+ expect(story_details).to include(
-+ by: "Brajeshwar",
-+ descendants: 52,
-+ id: 41119080,
-+ kids: [41120013, 41122192, 41121903, 41119817, 41120734, 41121026, 41119579, 41119896, 41119789, 41120995, 41119829],
-+ score: 125,
-+ time: 1722433249,
-+ title: "How great was the Great Oxidation Event?",
-+ type: "story",
-+ url: "https://eos.org/science-updates/how-great-was-the-great-oxidation-event"
-+ )
-+ end
-+
-+ it "should be able to interpret a binary response body" do
-+ story_details = VCR.use_cassette("story_details_41119443") do
-+ scraper.fetch_story_details(41119443)
-+ end
-+
-+ # In order to trim this cassette, I had to set a VCR config option:
-+ # req.response.body.force_encoding("UTF-8").
-+ # But this endpoint definitely returns a binary string response
-+ expect(story_details).to include(
-+ by: "darweenist",
-+ descendants: 59,
-+ id: 41119443,
-+ kids: [41122773, 41123293, 41122675],
-+ score: 77,
-+ text: "Hey HN! Dawson here from Martin (https://www.trymartin.com). Martin is a better Siri with an LLM brain and deeper integrations with everyday apps.",
-+ time: 1722435867,
-+ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri",
-+ type: "story"
-+ )
-+ end
-+ end
-+
-+ describe "#fetch_stories" do
-+ it "should retrieve multiple stories' meta data" do
-+ multi_story_details = VCR.use_cassette("story_details_multi") do
-+ scraper.fetch_stories([
-+ 41119080,
-+ 41120254,
-+ 41119443
-+ ])
-+ end
-+
-+ # this cassette was manually trimmed for length
-+ expect(multi_story_details).to match_array [
-+ {
-+ by: "Brajeshwar",
-+ descendants: 63,
-+ id: 41119080,
-+ kids: [41122732, 41119817, 41121903],
-+ score: 143,
-+ time: 1722433249,
-+ title: "How great was the Great Oxidation Event?",
-+ type: "story",
-+ url: "https://eos.org/science-updates/how-great-was-the-great-oxidation-event"
-+ },
-+ {
-+ by: "BerislavLopac",
-+ descendants: 236,
-+ id: 41120254,
-+ kids: [41123233, 41121444, 41123268],
-+ score: 212,
-+ time: 1722440995,
-+ title: "I prefer rST to Markdown",
-+ type: "story",
-+ url: "https://buttondown.email/hillelwayne/archive/why-i-prefer-rst-to-markdown/"
-+ },
-+ {
-+ by: "darweenist",
-+ descendants: 53,
-+ id: 41119443,
-+ kids: [41122773, 41122675, 41121790],
-+ score: 73,
-+ text: "Hey HN! Dawson here from Martin (https://www.trymartin.com). Martin is a better Siri with an LLM brain and deeper integrations with everyday apps.",
-+ time: 1722435867,
-+ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri",
-+ type: "story"
-+ }
-+ ]
-+ end
-+ end
-+
-+ describe ".retrieve_top_stories" do
-+ it "should do an end to end refresh of top story data" do
-+ all_top_stories = VCR.use_cassette("top_story_full_refresh") do
-+ described_class.retrieve_top_stories(
-+ cache_expiry: 0.seconds,
-+ relevant_fields: [:title]
-+ )
-+ end
-+
-+ # this cassette was HEAVILY trimmed for length
-+ expect(all_top_stories).to match_array [
-+ {
-+ title: "Suspicious data pattern in recent Venezuelan election"
-+ },
-+ {
-+ title: "How great was the Great Oxidation Event?"
-+ },
-+ {
-+ title: "Launch HN: Martin (YC S23) – Using LLMs to Make a Better Siri"
-+ }
-+ ]
-+ end
-+
-+ it "can optionally return the top x results" do
-+ some_top_stories = VCR.use_cassette("top_story_full_refresh") do
-+ described_class.retrieve_top_stories(
-+ limit: 2,
-+ cache_expiry: 0.seconds,
-+ relevant_fields: [:title]
-+ )
-+ end
-+
-+ # this cassette was HEAVILY trimmed for length
-+ expect(some_top_stories).to match_array [
-+ {
-+ title: "Suspicious data pattern in recent Venezuelan election"
-+ },
-+ {
-+ title: "How great was the Great Oxidation Event?"
-+ }
-+ ]
-+ end
-+ end
-+end
-diff --git a/spec/repos/like_repo_spec.rb b/spec/repos/like_repo_spec.rb
-new file mode 100644
-index 0000000..018542c
---- /dev/null
-+++ b/spec/repos/like_repo_spec.rb
-@@ -0,0 +1,120 @@
-+require "rails_helper"
-+
-+describe LikeRepo do
-+ let(:user_attrs) do
-+ {first_name: :foo, last_name: :bar, email: "f@b.c", password: "foobar123"}
-+ end
-+
-+ let(:user_a) {
-+ User.create(
-+ **user_attrs, email: "a@example.com", first_name: "user", last_name: "ayy"
-+ )
-+ }
-+ let(:user_b) {
-+ User.create(
-+ **user_attrs, email: "b@example.com", first_name: "user", last_name: "bee"
-+ )
-+ }
-+ let(:user_c) {
-+ User.create(
-+ **user_attrs, email: "c@example.com", first_name: "user", last_name: "see"
-+ )
-+ }
-+
-+ let(:repo) { described_class.new(user_a.id) }
-+
-+ describe "#toggle_like" do
-+ let(:story_id) { 999 }
-+
-+ it "should flip from liked to unliked and back on subsequent calls" do
-+ expect(described_class.fetch_likes(story_id)).to match_array []
-+
-+ repo.toggle_like(story_id)
-+ expect(described_class.fetch_likes(story_id)).to match_array [
-+ {
-+ user_id: user_a.id,
-+ name: "user ayy",
-+ story_id: story_id
-+ }
-+ ]
-+
-+ repo.toggle_like(story_id)
-+ expect(described_class.fetch_likes(story_id)).to match_array []
-+
-+ repo.toggle_like(story_id)
-+ expect(described_class.fetch_likes(story_id)).to match_array [
-+ {
-+ user_id: user_a.id,
-+ name: "user ayy",
-+ story_id: story_id
-+ }
-+ ]
-+ end
-+
-+ context "with multiple likers" do
-+ it "should show multiple names" do
-+ LikeRepo.new(user_a.id).toggle_like(story_id)
-+ LikeRepo.new(user_b.id).toggle_like(story_id)
-+
-+ expect(LikeRepo.fetch_likes(story_id)).to match_array [
-+ {
-+ user_id: user_a.id,
-+ name: "user ayy",
-+ story_id: story_id
-+ },
-+ {
-+ user_id: user_b.id,
-+ name: "user bee",
-+ story_id: story_id
-+ }
-+ ]
-+ end
-+ end
-+ end
-+
-+ describe ".fetch_grouped_likes" do
-+ it "should return a return a hash keyed by story_id" do
-+ LikeRepo.new(user_a.id)
-+ .toggle_like(100)
-+ .toggle_like(200)
-+
-+ LikeRepo.new(user_b.id)
-+ .toggle_like(200)
-+ .toggle_like(300)
-+
-+ # shouldn't be found
-+ LikeRepo.new(user_c.id)
-+ .toggle_like(500)
-+
-+ expect(LikeRepo.fetch_grouped_likes(
-+ [100, 200, 300, 400]
-+ )).to include(
-+ 100 => "user ayy",
-+ 200 => "user ayy, user bee",
-+ 300 => "user bee"
-+ )
-+ end
-+
-+ it "will return all likes if not given any story_ids" do
-+ LikeRepo.new(user_a.id)
-+ .toggle_like(100)
-+ .toggle_like(200)
-+
-+ LikeRepo.new(user_b.id)
-+ .toggle_like(200)
-+ .toggle_like(300)
-+
-+ # should be found
-+ LikeRepo.new(user_c.id)
-+ .toggle_like(500)
-+
-+ expect(LikeRepo.fetch_grouped_likes)
-+ .to include(
-+ 100 => "user ayy",
-+ 200 => "user bee, user ayy",
-+ 300 => "user bee",
-+ 500 => "user see"
-+ )
-+ end
-+ end
-+end