GitHub Repository: rapid7/metasploit-framework
Path: blob/master/tools/dev/find_and_replace_dead_reference_links.rb
##
#
# tools/dev/detect_dead_reference_links.rb must be run before this script, as it
# creates the url_check_results.json file that this script consumes.
#
# Usage: ruby tools/dev/find_and_replace_dead_reference_links.rb -f url_check_results.json
#
##
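
# The results file is expected to be a JSON array of objects carrying 'url',
# 'path', and 'archived_snapshot' keys (the keys read below), roughly like the
# hypothetical entry sketched here:
#
#   [
#     {
#       "url": "URL-https://example.com/advisory",
#       "path": "modules/exploits/example.rb",
#       "archived_snapshot": "https://web.archive.org/web/20200101000000/https://example.com/advisory"
#     }
#   ]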

require 'json'
require 'fileutils'

# Loads JSON data from the specified file.
# @param file_path [String] the path to the JSON file to load.
# @return [Array] parsed JSON data.
# @raise [Errno::ENOENT] if the file cannot be found.
# @raise [JSON::ParserError] if the JSON is malformed.
def load_json(file_path)
  JSON.parse(File.read(file_path))
end

# Replaces the original URLs with archived snapshots in the content of files.
# This method processes each entry in the provided data, and if a valid
# archived snapshot is available, it replaces the URL in the corresponding file.
# @param data [Array] the array of data containing URL and archived_snapshot pairs.
# @return [void]
def replace_links_in_files(data)
  data.each_with_index do |entry, index|
    puts "Processing entry #{index + 1}: #{entry['url']} -> #{entry['archived_snapshot']}"
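
    # The detection script appears to prefix each URL with 'URL-'; strip that
    # marker so the search below uses the original URL text.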
    url = entry['url'].sub(/^URL-/, '')
    path = entry['path']
    archived_snapshot = entry['archived_snapshot']

    # Skip entries with no archived version or errors fetching the snapshot
    if archived_snapshot == 'No archived version found' || archived_snapshot.nil? || archived_snapshot.start_with?('Error fetching Wayback')
      puts "Skipping entry #{index + 1} because no archived version is available or there was an error fetching it."
      next
    end

    # Construct full file path and check if file exists
    full_path = File.join(Dir.pwd, path)

    if File.exist?(full_path)
      file_content = File.read(full_path)
      # Replace the original URL with the archived snapshot
      updated_content = file_content.gsub(url, archived_snapshot)
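      # (gsub with a String pattern, as here, matches the URL literally rather
      # than as a regex, so metacharacters in the URL need no escaping.)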

      # Write changes back to the file if any replacements were made
      if file_content != updated_content
        File.open(full_path, 'w') { |file| file.write(updated_content) }
        puts "Replaced URL in file: #{full_path}"
      else
        puts "No change needed for file: #{full_path}"
      end
    else
      puts "File not found: #{full_path}"
    end
  end
end

begin
  # Load the JSON data from the file 'url_check_results.json'
  json_data = load_json('url_check_results.json')

  # Replace the URLs in files based on the loaded data
  replace_links_in_files(json_data)
rescue StandardError => e
  # Handle errors gracefully and provide meaningful feedback
  puts "An error occurred: #{e.message}"
end